You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hop.apache.org by gi...@apache.org on 2021/03/13 12:45:15 UTC

[incubator-hop-docs] branch asf-site updated: Documentation updated to 42f8481

This is an automated email from the ASF dual-hosted git repository.

git-site-role pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/incubator-hop-docs.git


The following commit(s) were added to refs/heads/asf-site by this push:
     new 639054f  Documentation updated to 42f8481
639054f is described below

commit 639054f07ad36a54c4c479e22eac84590092ccd8
Author: jenkins <bu...@apache.org>
AuthorDate: Sat Mar 13 12:45:11 2021 +0000

    Documentation updated to 42f8481
---
 .../modules/.asciidoctor/pages/index.adoc          |  74 ++--
 hop-user-manual/modules/ROOT/nav.adoc              |  74 ++--
 .../ROOT/pages/database/databases/as400.adoc       |  32 ++
 .../ROOT/pages/database/databases/cache.adoc       |  32 ++
 .../ROOT/pages/database/databases/clickhouse.adoc  |  32 ++
 .../modules/ROOT/pages/database/databases/db2.adoc |  32 ++
 .../ROOT/pages/database/databases/derby.adoc       |  32 ++
 .../ROOT/pages/database/databases/exasol.adoc      |  32 ++
 .../ROOT/pages/database/databases/firebird.adoc    |  32 ++
 .../pages/database/databases/googlebigquery.adoc   |  32 ++
 .../ROOT/pages/database/databases/greenplum.adoc   |  32 ++
 .../modules/ROOT/pages/database/databases/h2.adoc  |  32 ++
 .../ROOT/pages/database/databases/hypersonic.adoc  |  32 ++
 .../ROOT/pages/database/databases/infinidb.adoc    |  32 ++
 .../ROOT/pages/database/databases/infobright.adoc  |  32 ++
 .../ROOT/pages/database/databases/informix.adoc    |  32 ++
 .../ROOT/pages/database/databases/ingres.adoc      |  32 ++
 .../ROOT/pages/database/databases/interbase.adoc   |  32 ++
 .../ROOT/pages/database/databases/kingbasees.adoc  |  32 ++
 .../ROOT/pages/database/databases/mariadb.adoc     |  32 ++
 .../ROOT/pages/database/databases/monetdb.adoc     |  32 ++
 .../ROOT/pages/database/databases/mssql.adoc       |  34 ++
 .../ROOT/pages/database/databases/mssqlnative.adoc |  32 ++
 .../ROOT/pages/database/databases/mysql.adoc       |  32 ++
 .../ROOT/pages/database/databases/netezza.adoc     |  32 ++
 .../ROOT/pages/database/databases/oracle.adoc      |  32 ++
 .../ROOT/pages/database/databases/oraclerdb.adoc   |  32 ++
 .../ROOT/pages/database/databases/postgresql.adoc  |  32 ++
 .../ROOT/pages/database/databases/redshift.adoc    |  32 ++
 .../ROOT/pages/database/databases/sapdb.adoc       |  32 ++
 .../ROOT/pages/database/databases/snowflake.adoc   |  32 ++
 .../ROOT/pages/database/databases/sqlbase.adoc     |  32 ++
 .../ROOT/pages/database/databases/sqlite.adoc      |  32 ++
 .../ROOT/pages/database/databases/sybase.adoc      |  32 ++
 .../ROOT/pages/database/databases/sybaseiq.adoc    |  32 ++
 .../ROOT/pages/database/databases/teradata.adoc    |  31 ++
 .../ROOT/pages/database/databases/universe.adoc    |  32 ++
 .../ROOT/pages/database/databases/vectorwise.adoc  |  32 ++
 .../ROOT/pages/database/databases/vertica.adoc     |  32 ++
 .../ROOT/pages/pipeline/transforms/abort.adoc      |  35 ++
 .../pages/pipeline/transforms/addsequence.adoc     |  43 +++
 .../ROOT/pages/pipeline/transforms/addxml.adoc     | 103 ++++++
 .../pages/pipeline/transforms/analyticquery.adoc   | 105 ++++++
 .../ROOT/pages/pipeline/transforms/append.adoc     |  42 +++
 .../pipeline/transforms/beambigqueryinput.adoc     |  40 +++
 .../pipeline/transforms/beambigqueryoutput.adoc    |  38 ++
 .../pages/pipeline/transforms/beamconsume.adoc     |  46 +++
 .../ROOT/pages/pipeline/transforms/beaminput.adoc  |  56 +++
 .../ROOT/pages/pipeline/transforms/beamoutput.adoc |  59 +++
 .../pages/pipeline/transforms/beamproduce.adoc     |  36 ++
 .../pages/pipeline/transforms/beampublisher.adoc   |  38 ++
 .../pages/pipeline/transforms/beamsubscriber.adoc  |  39 ++
 .../pages/pipeline/transforms/beamtimestamp.adoc   |  34 ++
 .../ROOT/pages/pipeline/transforms/beamwindow.adoc |  63 ++++
 .../pipeline/transforms/blockingtransform.adoc     |  39 ++
 .../transforms/blockuntiltransformsfinish.adoc     |  43 +++
 .../ROOT/pages/pipeline/transforms/calculator.adoc | 147 ++++++++
 .../pipeline/transforms/changefileencoding.adoc    |  38 ++
 .../ROOT/pages/pipeline/transforms/checksum.adoc   |  43 +++
 .../ROOT/pages/pipeline/transforms/clonerow.adoc   |  38 ++
 .../ROOT/pages/pipeline/transforms/closure.adoc    |  37 ++
 .../ROOT/pages/pipeline/transforms/coalesce.adoc   |  46 +++
 .../pages/pipeline/transforms/columnexists.adoc    |  39 ++
 .../pipeline/transforms/combinationlookup.adoc     |  66 ++++
 .../pages/pipeline/transforms/concatfields.adoc    |  89 +++++
 .../ROOT/pages/pipeline/transforms/constant.adoc   |  33 ++
 .../pipeline/transforms/creditcardvalidator.adoc   |  39 ++
 .../ROOT/pages/pipeline/transforms/csvinput.adoc   |  60 ++++
 .../ROOT/pages/pipeline/transforms/cubeinput.adoc  |  35 ++
 .../ROOT/pages/pipeline/transforms/cubeoutput.adoc |  26 ++
 .../pages/pipeline/transforms/databasejoin.adoc    |  56 +++
 .../pages/pipeline/transforms/databaselookup.adoc  |  62 ++++
 .../ROOT/pages/pipeline/transforms/datagrid.adoc   |  41 +++
 .../ROOT/pages/pipeline/transforms/dbproc.adoc     |  45 +++
 .../ROOT/pages/pipeline/transforms/delay.adoc      |  35 ++
 .../ROOT/pages/pipeline/transforms/delete.adoc     |  39 ++
 .../pages/pipeline/transforms/denormaliser.adoc    |  57 +++
 .../pipeline/transforms/detectemptystream.adoc     |  35 ++
 .../pages/pipeline/transforms/detectlastrow.adoc   |  33 ++
 .../pages/pipeline/transforms/dimensionlookup.adoc | 128 +++++++
 .../ROOT/pages/pipeline/transforms/dummy.adoc      |  32 ++
 .../pages/pipeline/transforms/dynamicsqlrow.adoc   |  40 +++
 .../ROOT/pages/pipeline/transforms/edi2xml.adoc    |  51 +++
 .../pipeline/transforms/enhancedjsonoutput.adoc    | 182 ++++++++++
 .../ROOT/pages/pipeline/transforms/excelinput.adoc | 117 ++++++
 .../pages/pipeline/transforms/excelwriter.adoc     | 108 ++++++
 .../pages/pipeline/transforms/execprocess.adoc     |  42 +++
 .../ROOT/pages/pipeline/transforms/execsqlrow.adoc |  44 +++
 .../ROOT/pages/pipeline/transforms/fake.adoc       | 399 +++++++++++++++++++++
 .../pipeline/transforms/fieldschangesequence.adoc  |  34 ++
 .../pages/pipeline/transforms/fieldsplitter.adoc   |  83 +++++
 .../ROOT/pages/pipeline/transforms/fileexists.adoc |  39 ++
 .../ROOT/pages/pipeline/transforms/filelocked.adoc |  32 ++
 .../pages/pipeline/transforms/filesfromresult.adoc |  47 +++
 .../pages/pipeline/transforms/filestoresult.adoc   |  34 ++
 .../ROOT/pages/pipeline/transforms/filterrows.adoc |  87 +++++
 .../ROOT/pages/pipeline/transforms/flattener.adoc  |  54 +++
 .../ROOT/pages/pipeline/transforms/fuzzymatch.adoc |  69 ++++
 .../pages/pipeline/transforms/getfilenames.adoc    |  62 ++++
 .../pipeline/transforms/getfilesrowcount.adoc      |  63 ++++
 .../pipeline/transforms/getslavesequence.adoc      | 134 +++++++
 .../pages/pipeline/transforms/getsubfolders.adoc   |  53 +++
 .../pages/pipeline/transforms/gettablenames.adoc   |  51 +++
 .../pages/pipeline/transforms/getvariable.adoc     |  52 +++
 .../ROOT/pages/pipeline/transforms/getxmldata.adoc |  96 +++++
 .../ROOT/pages/pipeline/transforms/groupby.adoc    |  65 ++++
 .../ROOT/pages/pipeline/transforms/http.adoc       |  67 ++++
 .../ROOT/pages/pipeline/transforms/httppost.adoc   |  78 ++++
 .../ROOT/pages/pipeline/transforms/ifnull.adoc     |  33 ++
 .../ROOT/pages/pipeline/transforms/injector.adoc   |  36 ++
 .../pages/pipeline/transforms/insertupdate.adoc    |  40 +++
 .../ROOT/pages/pipeline/transforms/janino.adoc     | 159 ++++++++
 .../ROOT/pages/pipeline/transforms/javafilter.adoc |  71 ++++
 .../ROOT/pages/pipeline/transforms/javascript.adoc |  42 +++
 .../ROOT/pages/pipeline/transforms/joinrows.adoc   |  42 +++
 .../ROOT/pages/pipeline/transforms/jsoninput.adoc  | 125 +++++++
 .../ROOT/pages/pipeline/transforms/jsonoutput.adoc | 201 +++++++++++
 .../ROOT/pages/pipeline/transforms/ldapinput.adoc  |  93 +++++
 .../ROOT/pages/pipeline/transforms/ldapoutput.adoc |  88 +++++
 .../pages/pipeline/transforms/loadfileinput.adoc   | 100 ++++++
 .../ROOT/pages/pipeline/transforms/mail.adoc       | 116 ++++++
 .../ROOT/pages/pipeline/transforms/mailinput.adoc  |  93 +++++
 .../pages/pipeline/transforms/mailvalidator.adoc   |  47 +++
 .../ROOT/pages/pipeline/transforms/memgroupby.adoc |  41 +++
 .../ROOT/pages/pipeline/transforms/mergejoin.adoc  |  44 +++
 .../ROOT/pages/pipeline/transforms/mergerows.adoc  |  57 +++
 .../ROOT/pages/pipeline/transforms/metainject.adoc | 100 ++++++
 .../pages/pipeline/transforms/metastructure.adoc   |  35 ++
 .../pipeline/transforms/monetdbbulkloader.adoc     |  84 +++++
 .../pages/pipeline/transforms/mongodbinput.adoc    | 287 +++++++++++++++
 .../pages/pipeline/transforms/mongodboutput.adoc   | 350 ++++++++++++++++++
 .../pages/pipeline/transforms/mqttpublisher.adoc   |  23 ++
 .../pages/pipeline/transforms/mqttsubscriber.adoc  |  23 ++
 .../ROOT/pages/pipeline/transforms/multimerge.adoc |  38 ++
 .../ROOT/pages/pipeline/transforms/normaliser.adoc |  45 +++
 .../ROOT/pages/pipeline/transforms/nullif.adoc     |  29 ++
 .../pages/pipeline/transforms/numberrange.adoc     |  40 +++
 .../pages/pipeline/transforms/pgbulkloader.adoc    |  75 ++++
 .../pipeline/transforms/pgpdecryptstream.adoc      |  39 ++
 .../pipeline/transforms/pgpencryptstream.adoc      |  39 ++
 .../pipeline/transforms/pipelineexcecutor.adoc     |  87 +++++
 .../pages/pipeline/transforms/processfiles.adoc    |  41 +++
 .../pages/pipeline/transforms/propertyinput.adoc   |  76 ++++
 .../pages/pipeline/transforms/propertyoutput.adoc  |  58 +++
 .../pages/pipeline/transforms/randomvalue.adoc     |  42 +++
 .../pipeline/transforms/recordsfromstream.adoc     |  56 +++
 .../ROOT/pages/pipeline/transforms/regexeval.adoc  | 146 ++++++++
 .../pages/pipeline/transforms/replacestring.adoc   |  55 +++
 .../pipeline/transforms/reservoirsampling.adoc     |  35 ++
 .../ROOT/pages/pipeline/transforms/rest.adoc       | 106 ++++++
 .../pages/pipeline/transforms/rowgenerator.adoc    |  38 ++
 .../pages/pipeline/transforms/rowsfromresult.adoc  |  57 +++
 .../pages/pipeline/transforms/rowstoresult.adoc    |  33 ++
 .../ROOT/pages/pipeline/transforms/sasinput.adoc   |  43 +++
 .../pages/pipeline/transforms/selectvalues.adoc    |  92 +++++
 .../pipeline/transforms/setvalueconstant.adoc      |  43 +++
 .../pages/pipeline/transforms/setvaluefield.adoc   |  40 +++
 .../pages/pipeline/transforms/setvariable.adoc     |  43 +++
 .../ROOT/pages/pipeline/transforms/sort.adoc       |  44 +++
 .../pages/pipeline/transforms/sortedmerge.adoc     |  39 ++
 .../pipeline/transforms/splitfieldtorows.adoc      |  40 +++
 .../ROOT/pages/pipeline/transforms/sql.adoc        |  44 +++
 .../pages/pipeline/transforms/sqlfileoutput.adoc   |  64 ++++
 .../ROOT/pages/pipeline/transforms/ssh.adoc        |  68 ++++
 .../pages/pipeline/transforms/streamlookup.adoc    |  45 +++
 .../pipeline/transforms/streamschemamerge.adoc     |  37 ++
 .../ROOT/pages/pipeline/transforms/stringcut.adoc  |  40 +++
 .../pipeline/transforms/stringoperations.adoc      |  55 +++
 .../ROOT/pages/pipeline/transforms/switchcase.adoc |  48 +++
 .../pipeline/transforms/synchronizeaftermerge.adoc |  59 +++
 .../ROOT/pages/pipeline/transforms/systemdata.adoc | 139 +++++++
 .../pages/pipeline/transforms/tablecompare.adoc    |  42 +++
 .../pages/pipeline/transforms/tableexists.adoc     |  36 ++
 .../ROOT/pages/pipeline/transforms/tableinput.adoc |  49 +++
 .../pages/pipeline/transforms/tableoutput.adoc     |  68 ++++
 .../ROOT/pages/pipeline/transforms/terafast.adoc   |  49 +++
 .../pages/pipeline/transforms/textfileinput.adoc   | 240 +++++++++++++
 .../pages/pipeline/transforms/textfileoutput.adoc  | 110 ++++++
 .../pipeline/transforms/tokenreplacement.adoc      |  24 ++
 .../ROOT/pages/pipeline/transforms/uniquerows.adoc |  37 ++
 .../pipeline/transforms/uniquerowsbyhashset.adoc   |  44 +++
 .../ROOT/pages/pipeline/transforms/update.adoc     |  47 +++
 .../pipeline/transforms/userdefinedjavaclass.adoc  | 218 +++++++++++
 .../pages/pipeline/transforms/valuemapper.adoc     |  51 +++
 .../pipeline/transforms/webserviceavailable.adoc   |  37 ++
 .../pages/pipeline/transforms/webservices.adoc     |  50 +++
 .../pipeline/transforms/workflowexecutor.adoc      |  71 ++++
 .../ROOT/pages/pipeline/transforms/writetolog.adoc |  60 ++++
 .../pages/pipeline/transforms/xmlinputstream.adoc  |  87 +++++
 .../ROOT/pages/pipeline/transforms/xmljoin.adoc    |  50 +++
 .../ROOT/pages/pipeline/transforms/xmloutput.adoc  |  87 +++++
 .../pages/pipeline/transforms/xsdvalidator.adoc    |  60 ++++
 .../ROOT/pages/pipeline/transforms/xslt.adoc       |  40 +++
 .../ROOT/pages/pipeline/transforms/yamlinput.adoc  |  78 ++++
 .../ROOT/pages/pipeline/transforms/zipfile.adoc    |  41 +++
 .../modules/ROOT/pages/workflow/actions/abort.adoc |  33 ++
 .../pages/workflow/actions/addresultfilenames.adoc |  36 ++
 .../ROOT/pages/workflow/actions/as400command.adoc  |  40 +++
 .../pages/workflow/actions/checkdbconnection.adoc  |  36 ++
 .../pages/workflow/actions/checkfilelocked.adoc    |  39 ++
 .../ROOT/pages/workflow/actions/columnsexist.adoc  |  36 ++
 .../ROOT/pages/workflow/actions/copyfiles.adoc     |  70 ++++
 .../workflow/actions/copymoveresultfilenames.adoc  |  49 +++
 .../ROOT/pages/workflow/actions/createfile.adoc    |  34 ++
 .../ROOT/pages/workflow/actions/createfolder.adoc  |  34 ++
 .../modules/ROOT/pages/workflow/actions/delay.adoc |  34 ++
 .../ROOT/pages/workflow/actions/deletefile.adoc    |  34 ++
 .../ROOT/pages/workflow/actions/deletefiles.adoc   |  37 ++
 .../ROOT/pages/workflow/actions/deletefolders.adoc |  43 +++
 .../workflow/actions/deleteresultfilenames.adoc    |  35 ++
 .../ROOT/pages/workflow/actions/dostounix.adoc     |  56 +++
 .../ROOT/pages/workflow/actions/dtdvalidator.adoc  |  35 ++
 .../modules/ROOT/pages/workflow/actions/dummy.adoc |  24 ++
 .../modules/ROOT/pages/workflow/actions/eval.adoc  | 138 +++++++
 .../pages/workflow/actions/evalfilesmetrics.adoc   |  82 +++++
 .../workflow/actions/evaluatetablecontent.adoc     |  46 +++
 .../ROOT/pages/workflow/actions/filecompare.adoc   |  34 ++
 .../ROOT/pages/workflow/actions/fileexists.adoc    |  39 ++
 .../ROOT/pages/workflow/actions/filesexist.adoc    |  40 +++
 .../ROOT/pages/workflow/actions/folderisempty.adoc |  36 ++
 .../pages/workflow/actions/folderscompare.adoc     |  40 +++
 .../modules/ROOT/pages/workflow/actions/ftp.adoc   |  88 +++++
 .../ROOT/pages/workflow/actions/ftpdelete.adoc     |  97 +++++
 .../ROOT/pages/workflow/actions/ftpput.adoc        |  71 ++++
 .../ROOT/pages/workflow/actions/ftpsget.adoc       |  83 +++++
 .../ROOT/pages/workflow/actions/ftpsput.adoc       |  69 ++++
 .../ROOT/pages/workflow/actions/getpop.adoc        |  43 +++
 .../modules/ROOT/pages/workflow/actions/http.adoc  |  62 ++++
 .../modules/ROOT/pages/workflow/actions/mail.adoc  | 120 +++++++
 .../ROOT/pages/workflow/actions/mailvalidator.adoc |  46 +++
 .../ROOT/pages/workflow/actions/movefiles.adoc     | 107 ++++++
 .../ROOT/pages/workflow/actions/msgboxinfo.adoc    |  41 +++
 .../ROOT/pages/workflow/actions/mssqlbulkload.adoc |  73 ++++
 .../ROOT/pages/workflow/actions/mysqlbulkfile.adoc |  48 +++
 .../ROOT/pages/workflow/actions/mysqlbulkload.adoc |  47 +++
 .../pages/workflow/actions/pgpdecryptfiles.adoc    |  95 +++++
 .../pages/workflow/actions/pgpencryptfiles.adoc    |  97 +++++
 .../ROOT/pages/workflow/actions/pgpfiles.adoc      |  20 ++
 .../ROOT/pages/workflow/actions/pgpverify.adoc     |  36 ++
 .../modules/ROOT/pages/workflow/actions/ping.adoc  |  38 ++
 .../ROOT/pages/workflow/actions/pipeline.adoc      | 100 ++++++
 .../ROOT/pages/workflow/actions/repeat-end.adoc    |  29 ++
 .../ROOT/pages/workflow/actions/repeat.adoc        |  57 +++
 .../pages/workflow/actions/runpipelinetests.adoc   |  35 ++
 .../workflow/actions/sendnagiospassivecheck.adoc   |  46 +++
 .../ROOT/pages/workflow/actions/setvariables.adoc  |  40 +++
 .../modules/ROOT/pages/workflow/actions/sftp.adoc  |  75 ++++
 .../ROOT/pages/workflow/actions/sftpput.adoc       |  73 ++++
 .../modules/ROOT/pages/workflow/actions/shell.adoc |  48 +++
 .../ROOT/pages/workflow/actions/simpleeval.adoc    |  79 ++++
 .../ROOT/pages/workflow/actions/snmptrap.adoc      |  44 +++
 .../modules/ROOT/pages/workflow/actions/sql.adoc   |  38 ++
 .../modules/ROOT/pages/workflow/actions/start.adoc |  26 ++
 .../ROOT/pages/workflow/actions/success.adoc       |  32 ++
 .../ROOT/pages/workflow/actions/tableexists.adoc   |  37 ++
 .../ROOT/pages/workflow/actions/telnet.adoc        |  35 ++
 .../pages/workflow/actions/truncatetables.adoc     |  39 ++
 .../modules/ROOT/pages/workflow/actions/unzip.adoc |  55 +++
 .../ROOT/pages/workflow/actions/waitforfile.adoc   |  38 ++
 .../ROOT/pages/workflow/actions/waitforsql.adoc    |  44 +++
 .../workflow/actions/webserviceavailable.adoc      |  35 ++
 .../ROOT/pages/workflow/actions/workflow.adoc      |  97 +++++
 .../ROOT/pages/workflow/actions/writetofile.adoc   |  38 ++
 .../ROOT/pages/workflow/actions/writetolog.adoc    |  35 ++
 .../ROOT/pages/workflow/actions/xmlwellformed.adoc |  48 +++
 .../ROOT/pages/workflow/actions/xsdvalidator.adoc  |  41 +++
 .../modules/ROOT/pages/workflow/actions/xslt.adoc  |  48 +++
 .../ROOT/pages/workflow/actions/zipfile.adoc       |  66 ++++
 268 files changed, 15543 insertions(+), 74 deletions(-)

diff --git a/hop-user-manual/modules/.asciidoctor/pages/index.adoc b/hop-user-manual/modules/.asciidoctor/pages/index.adoc
index 92dc638..dbf380e 100644
--- a/hop-user-manual/modules/.asciidoctor/pages/index.adoc
+++ b/hop-user-manual/modules/.asciidoctor/pages/index.adoc
@@ -49,11 +49,11 @@ include::{sourcepath}/hop-server/web-service.adoc[leveloffset=+1]
 include::{sourcepath}/plugins/plugins.adoc[leveloffset=+0]
 include::{sourcepath}/plugins/transforms.adoc[leveloffset=+1]
 include::{sourcepath}/plugins/transforms/abort.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/checksum.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/constant.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/addsequence.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/addxml.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/checksum.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/fieldschangesequence.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/addxml.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/analyticquery.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/append.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/beambigqueryinput.adoc[leveloffset=+2]
@@ -68,7 +68,6 @@ include::{sourcepath}/plugins/transforms/beamtimestamp.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/beamwindow.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/blockingtransform.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/blockuntiltransformsfinish.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/csvinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/calculator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/dbproc.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/changefileencoding.adoc[leveloffset=+2]
@@ -82,12 +81,13 @@ include::{sourcepath}/plugins/transforms/combinationlookup.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/concatfields.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/rowstoresult.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/creditcardvalidator.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/csvinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/cubeinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/cubeoutput.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/datagrid.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/validator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/databasejoin.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/databaselookup.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/datagrid.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/validator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/delay.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/delete.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/detectemptystream.adoc[leveloffset=+2]
@@ -98,9 +98,9 @@ include::{sourcepath}/plugins/transforms/edi2xml.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/mailinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/enhancedjsonoutput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/excelwriter.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/sql.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/execprocess.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/execsqlrow.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/sql.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/fake.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/fileexists.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/filesfromresult.adoc[leveloffset=+2]
@@ -109,33 +109,31 @@ include::{sourcepath}/plugins/transforms/filterrows.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/fuzzymatch.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/randomvalue.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/getxmldata.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/getfilenames.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/getfilesrowcount.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/getslavesequence.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/recordsfromstream.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/rowsfromresult.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/getsubfolders.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/systemdata.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/getfilenames.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/getfilesrowcount.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/recordsfromstream.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/gettablenames.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/getvariable.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/groupby.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/httppost.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/http.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/httppost.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/detectlastrow.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/ifnull.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/injector.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/insertupdate.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/jsoninput.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/jsonoutput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/javafilter.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/javascript.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/joinrows.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/jsoninput.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/jsonoutput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/ldapinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/ldapoutput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/ldifinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/loadfileinput.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/mqttpublisher.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/mqttsubscriber.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/mail.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/mailvalidator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/memgroupby.adoc[leveloffset=+2]
@@ -148,6 +146,8 @@ include::{sourcepath}/plugins/transforms/exceloutput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/monetdbbulkloader.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/mongodbinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/mongodboutput.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/mqttpublisher.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/mqttsubscriber.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/multimerge.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/nullif.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/numberrange.adoc[leveloffset=+2]
@@ -156,27 +156,27 @@ include::{sourcepath}/plugins/transforms/pgpencryptstream.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/pipelineexcecutor.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/pgbulkloader.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/processfiles.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/rest.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/propertyinput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/regexeval.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/replacestring.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/reservoirsampling.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/rest.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/denormaliser.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/flattener.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/rowgenerator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/normaliser.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/ssh.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/sasinput.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/sqlfileoutput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/selectvalues.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/syslog.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/setvariable.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/setvaluefield.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/setvalueconstant.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/transforms/sort.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/setvariable.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/sortedmerge.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/sort.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/fieldsplitter.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/splitfieldtorows.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/transforms/sqlfileoutput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/streamlookup.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/streamschemamerge.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/transforms/stringoperations.adoc[leveloffset=+2]
@@ -213,12 +213,13 @@ include::{sourcepath}/plugins/projects/projects.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/passwords.adoc[leveloffset=+1]
 include::{sourcepath}/plugins/passwords/aespasswords.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases.adoc[leveloffset=+1]
-include::{sourcepath}/plugins/databases/as400.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/derby.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/as400.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/cache.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/clickhouse.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/db2.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/exasol.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/firebird.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/googlebigquery.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/greenplum.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/sqlbase.adoc[leveloffset=+2]
@@ -227,10 +228,15 @@ include::{sourcepath}/plugins/databases/hypersonic.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/universe.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/infinidb.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/infobright.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/mssql.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/mssqlnative.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/informix.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/ingres.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/interbase.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/kingbasees.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/mariadb.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/sapdb.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/monetdb.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/mssql.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/databases/mssqlnative.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/mysql.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/netezza.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/oracle.adoc[leveloffset=+2]
@@ -244,16 +250,10 @@ include::{sourcepath}/plugins/databases/sybaseiq.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/teradata.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/vectorwise.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/databases/vertica.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/firebird.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/informix.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/ingres.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/interbase.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/kingbasees.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/databases/monetdb.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions.adoc[leveloffset=+1]
-include::{sourcepath}/plugins/actions/as400command.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/abort.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/addresultfilenames.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/as400command.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/checkdbconnection.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/checkfilelocked.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/webserviceavailable.adoc[leveloffset=+2]
@@ -264,35 +264,35 @@ include::{sourcepath}/plugins/actions/copyfiles.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/copymoveresultfilenames.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/createfile.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/createfolder.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/dtdvalidator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/pgpdecryptfiles.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/delay.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/deletefile.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/deleteresultfilenames.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/deletefiles.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/deletefolders.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/deleteresultfilenames.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/msgboxinfo.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/dostounix.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/dtdvalidator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/dummy.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/pgpencryptfiles.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/repeat-end.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/evalfilesmetrics.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/evaluatetablecontent.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/ftpdelete.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/fileexists.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/filecompare.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/fileexists.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/folderisempty.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/folderscompare.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/getpop.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/ftpdelete.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/ftp.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/ftpsget.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/sftp.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/getpop.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/http.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/eval.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/mssqlbulkload.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/mail.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/mailvalidator.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/movefiles.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/mssqlbulkload.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/mysqlbulkfile.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/mysqlbulkload.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/pgpfiles.adoc[leveloffset=+2]
@@ -302,13 +302,13 @@ include::{sourcepath}/plugins/actions/ftpput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/sftpput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/repeat.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/runpipelinetests.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/sql.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/sendnagiospassivecheck.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/syslog.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/sendnagiospassivecheck.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/setvariables.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/shell.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/simpleeval.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/snmptrap.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/sql.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/start.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/success.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/tableexists.adoc[leveloffset=+2]
@@ -317,8 +317,8 @@ include::{sourcepath}/plugins/actions/truncatetables.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/unzip.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/ftpsput.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/pgpverify.adoc[leveloffset=+2]
-include::{sourcepath}/plugins/actions/waitforsql.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/waitforfile.adoc[leveloffset=+2]
+include::{sourcepath}/plugins/actions/waitforsql.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/workflow.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/writetofile.adoc[leveloffset=+2]
 include::{sourcepath}/plugins/actions/writetolog.adoc[leveloffset=+2]
diff --git a/hop-user-manual/modules/ROOT/nav.adoc b/hop-user-manual/modules/ROOT/nav.adoc
index b6df330..1fa1146 100644
--- a/hop-user-manual/modules/ROOT/nav.adoc
+++ b/hop-user-manual/modules/ROOT/nav.adoc
@@ -32,11 +32,11 @@
 //::=START AUTO GENERATED LINKS
 ** xref:plugins/transforms.adoc[Transform Plugins]
 *** xref:plugins/transforms/abort.adoc[Abort]
+*** xref:plugins/transforms/checksum.adoc[Add a checksum]
 *** xref:plugins/transforms/constant.adoc[Add Constants]
 *** xref:plugins/transforms/addsequence.adoc[Add Sequence]
-*** xref:plugins/transforms/addxml.adoc[Add XML]
-*** xref:plugins/transforms/checksum.adoc[Add a checksum]
 *** xref:plugins/transforms/fieldschangesequence.adoc[Add value fields changing sequence]
+*** xref:plugins/transforms/addxml.adoc[Add XML]
 *** xref:plugins/transforms/analyticquery.adoc[Analytic Query]
 *** xref:plugins/transforms/append.adoc[Append Streams]
 *** xref:plugins/transforms/beambigqueryinput.adoc[Beam BigQuery Input]
@@ -51,7 +51,6 @@
 *** xref:plugins/transforms/beamwindow.adoc[Beam Window]
 *** xref:plugins/transforms/blockingtransform.adoc[Blocking transform]
 *** xref:plugins/transforms/blockuntiltransformsfinish.adoc[Blocking until transforms finish]
-*** xref:plugins/transforms/csvinput.adoc[CSV File Input]
 *** xref:plugins/transforms/calculator.adoc[Calculator]
 *** xref:plugins/transforms/dbproc.adoc[Call DB procedure]
 *** xref:plugins/transforms/changefileencoding.adoc[Change file encoding]
@@ -65,12 +64,13 @@
 *** xref:plugins/transforms/concatfields.adoc[Concat Fields]
 *** xref:plugins/transforms/rowstoresult.adoc[Copy rows to result]
 *** xref:plugins/transforms/creditcardvalidator.adoc[Credit card validator]
+*** xref:plugins/transforms/csvinput.adoc[CSV File Input]
 *** xref:plugins/transforms/cubeinput.adoc[Cube input]
 *** xref:plugins/transforms/cubeoutput.adoc[Cube output]
-*** xref:plugins/transforms/datagrid.adoc[Data Grid]
-*** xref:plugins/transforms/validator.adoc[Data Validator]
 *** xref:plugins/transforms/databasejoin.adoc[Database Join]
 *** xref:plugins/transforms/databaselookup.adoc[Database Lookup]
+*** xref:plugins/transforms/datagrid.adoc[Data Grid]
+*** xref:plugins/transforms/validator.adoc[Data Validator]
 *** xref:plugins/transforms/delay.adoc[Delay row]
 *** xref:plugins/transforms/delete.adoc[Delete]
 *** xref:plugins/transforms/detectemptystream.adoc[Detect Empty Stream]
@@ -81,9 +81,9 @@
 *** xref:plugins/transforms/mailinput.adoc[Email Messages Input]
 *** xref:plugins/transforms/enhancedjsonoutput.adoc[Enhanced JSON Output]
 *** xref:plugins/transforms/excelwriter.adoc[Excel writer]
-*** xref:plugins/transforms/sql.adoc[Execute SQL script]
 *** xref:plugins/transforms/execprocess.adoc[Execute a process]
 *** xref:plugins/transforms/execsqlrow.adoc[Execute row SQL script]
+*** xref:plugins/transforms/sql.adoc[Execute SQL script]
 *** xref:plugins/transforms/fake.adoc[Fake data]
 *** xref:plugins/transforms/fileexists.adoc[File exists]
 *** xref:plugins/transforms/filesfromresult.adoc[Files from result]
@@ -92,33 +92,31 @@
 *** xref:plugins/transforms/fuzzymatch.adoc[Fuzzy match]
 *** xref:plugins/transforms/randomvalue.adoc[Generate Random Value]
 *** xref:plugins/transforms/getxmldata.adoc[Get Data From XML]
+*** xref:plugins/transforms/getfilenames.adoc[Get filenames]
+*** xref:plugins/transforms/getfilesrowcount.adoc[Get files rowcount]
 *** xref:plugins/transforms/getslavesequence.adoc[Get ID from slave server]
+*** xref:plugins/transforms/recordsfromstream.adoc[Get records from stream ]
 *** xref:plugins/transforms/rowsfromresult.adoc[Get Rows from Result]
 *** xref:plugins/transforms/getsubfolders.adoc[Get SubFolder names]
 *** xref:plugins/transforms/systemdata.adoc[Get System Info]
-*** xref:plugins/transforms/getfilenames.adoc[Get filenames]
-*** xref:plugins/transforms/getfilesrowcount.adoc[Get files rowcount]
-*** xref:plugins/transforms/recordsfromstream.adoc[Get records from stream ]
 *** xref:plugins/transforms/gettablenames.adoc[Get table names]
 *** xref:plugins/transforms/getvariable.adoc[Get variables]
 *** xref:plugins/transforms/groupby.adoc[Group By]
-*** xref:plugins/transforms/httppost.adoc[HTTP Post]
 *** xref:plugins/transforms/http.adoc[HTTP client]
+*** xref:plugins/transforms/httppost.adoc[HTTP Post]
 *** xref:plugins/transforms/detectlastrow.adoc[Identify last row in a stream]
 *** xref:plugins/transforms/ifnull.adoc[If Null]
 *** xref:plugins/transforms/injector.adoc[Injector]
 *** xref:plugins/transforms/insertupdate.adoc[Insert / Update]
-*** xref:plugins/transforms/jsoninput.adoc[JSON Input]
-*** xref:plugins/transforms/jsonoutput.adoc[JSON Output]
 *** xref:plugins/transforms/javafilter.adoc[Java Filter]
 *** xref:plugins/transforms/javascript.adoc[JavaScript]
 *** xref:plugins/transforms/joinrows.adoc[Join Rows]
+*** xref:plugins/transforms/jsoninput.adoc[JSON Input]
+*** xref:plugins/transforms/jsonoutput.adoc[JSON Output]
 *** xref:plugins/transforms/ldapinput.adoc[LDAP Input]
 *** xref:plugins/transforms/ldapoutput.adoc[LDAP Output]
 *** xref:plugins/transforms/ldifinput.adoc[LDIF Input]
 *** xref:plugins/transforms/loadfileinput.adoc[Load file content in memory]
-*** xref:plugins/transforms/mqttpublisher.adoc[MQTT Publisher]
-*** xref:plugins/transforms/mqttsubscriber.adoc[MQTT Subscriber]
 *** xref:plugins/transforms/mail.adoc[Mail]
 *** xref:plugins/transforms/mailvalidator.adoc[Mail Validator]
 *** xref:plugins/transforms/memgroupby.adoc[Memory Group By]
@@ -131,6 +129,8 @@
 *** xref:plugins/transforms/monetdbbulkloader.adoc[MonetDB Bulk Loader]
 *** xref:plugins/transforms/mongodbinput.adoc[MongoDB Input]
 *** xref:plugins/transforms/mongodboutput.adoc[MongoDB Output]
+*** xref:plugins/transforms/mqttpublisher.adoc[MQTT Publisher]
+*** xref:plugins/transforms/mqttsubscriber.adoc[MQTT Subscriber]
 *** xref:plugins/transforms/multimerge.adoc[Multiway Merge Join]
 *** xref:plugins/transforms/nullif.adoc[Null If]
 *** xref:plugins/transforms/numberrange.adoc[Number range]
@@ -139,27 +139,27 @@
 *** xref:plugins/transforms/pipelineexcecutor.adoc[Pipeline Executor]
 *** xref:plugins/transforms/pgbulkloader.adoc[PostgreSQL Bulk Loader]
 *** xref:plugins/transforms/processfiles.adoc[Process files]
-*** xref:plugins/transforms/rest.adoc[REST Client]
 *** xref:plugins/transforms/propertyinput.adoc[Read data (key, value) from properties files.]
 *** xref:plugins/transforms/regexeval.adoc[Regex Evaluation]
 *** xref:plugins/transforms/replacestring.adoc[Replace in String]
 *** xref:plugins/transforms/reservoirsampling.adoc[Reservoir Sampling]
+*** xref:plugins/transforms/rest.adoc[REST Client]
 *** xref:plugins/transforms/denormaliser.adoc[Row Denormaliser]
 *** xref:plugins/transforms/flattener.adoc[Row Flattener]
 *** xref:plugins/transforms/rowgenerator.adoc[Row Generator]
 *** xref:plugins/transforms/normaliser.adoc[Row Normaliser]
 *** xref:plugins/transforms/ssh.adoc[Run SSH commands]
 *** xref:plugins/transforms/sasinput.adoc[SAS Input]
-*** xref:plugins/transforms/sqlfileoutput.adoc[SQL File Output]
 *** xref:plugins/transforms/selectvalues.adoc[Select Values]
 *** xref:plugins/transforms/syslog.adoc[Send Message to Syslog]
-*** xref:plugins/transforms/setvariable.adoc[Set Variables]
 *** xref:plugins/transforms/setvaluefield.adoc[Set field Value]
 *** xref:plugins/transforms/setvalueconstant.adoc[Set field value to a constant]
-*** xref:plugins/transforms/sort.adoc[Sort Rows]
+*** xref:plugins/transforms/setvariable.adoc[Set Variables]
 *** xref:plugins/transforms/sortedmerge.adoc[Sorted Merge]
+*** xref:plugins/transforms/sort.adoc[Sort Rows]
 *** xref:plugins/transforms/fieldsplitter.adoc[Split Fields]
 *** xref:plugins/transforms/splitfieldtorows.adoc[Split fields to rows]
+*** xref:plugins/transforms/sqlfileoutput.adoc[SQL File Output]
 *** xref:plugins/transforms/streamlookup.adoc[Stream Lookup]
 *** xref:plugins/transforms/streamschemamerge.adoc[Stream Schema Merge]
 *** xref:plugins/transforms/stringoperations.adoc[String operations]
@@ -196,12 +196,13 @@
 ** xref:plugins/passwords.adoc[Password Plugins]
 *** xref:plugins/passwords/aespasswords.adoc[AES Two way password encoder]
 ** xref:plugins/databases.adoc[Database Plugins]
-*** xref:plugins/databases/as400.adoc[AS400]
 *** xref:plugins/databases/derby.adoc[Apache Derby]
+*** xref:plugins/databases/as400.adoc[AS400]
 *** xref:plugins/databases/cache.adoc[Cache]
 *** xref:plugins/databases/clickhouse.adoc[ClickHouse]
 *** xref:plugins/databases/db2.adoc[DB2]
 *** xref:plugins/databases/exasol.adoc[Exasol]
+*** xref:plugins/databases/firebird.adoc[firebird]
 *** xref:plugins/databases/googlebigquery.adoc[Google BigQuery]
 *** xref:plugins/databases/greenplum.adoc[Greenplum]
 *** xref:plugins/databases/sqlbase.adoc[Gupta SQLBase]
@@ -210,10 +211,15 @@
 *** xref:plugins/databases/universe.adoc[IBM Universe]
 *** xref:plugins/databases/infinidb.adoc[InfiniDB]
 *** xref:plugins/databases/infobright.adoc[Infobright DB]
-*** xref:plugins/databases/mssql.adoc[MS SqlServer]
-*** xref:plugins/databases/mssqlnative.adoc[MS SqlServer (Native)]
+*** xref:plugins/databases/informix.adoc[informix]
+*** xref:plugins/databases/ingres.adoc[ingres]
+*** xref:plugins/databases/interbase.adoc[interbase]
+*** xref:plugins/databases/kingbasees.adoc[kingbase es]
 *** xref:plugins/databases/mariadb.adoc[Mariadb]
 *** xref:plugins/databases/sapdb.adoc[MaxDB (SAP DB)]
+*** xref:plugins/databases/monetdb.adoc[monetdb]
+*** xref:plugins/databases/mssql.adoc[MS SqlServer]
+*** xref:plugins/databases/mssqlnative.adoc[MS SqlServer (Native)]
 *** xref:plugins/databases/mysql.adoc[MySql]
 *** xref:plugins/databases/netezza.adoc[Netezza]
 *** xref:plugins/databases/oracle.adoc[Oracle]
@@ -227,16 +233,10 @@
 *** xref:plugins/databases/teradata.adoc[Teradata]
 *** xref:plugins/databases/vectorwise.adoc[Vectorwise]
 *** xref:plugins/databases/vertica.adoc[Vertica]
-*** xref:plugins/databases/firebird.adoc[firebird]
-*** xref:plugins/databases/informix.adoc[informix]
-*** xref:plugins/databases/ingres.adoc[ingres]
-*** xref:plugins/databases/interbase.adoc[interbase]
-*** xref:plugins/databases/kingbasees.adoc[kingbase es]
-*** xref:plugins/databases/monetdb.adoc[monetdb]
 ** xref:plugins/actions.adoc[Action Plugins]
-*** xref:plugins/actions/as400command.adoc[AS/400 Command]
 *** xref:plugins/actions/abort.adoc[Abort]
 *** xref:plugins/actions/addresultfilenames.adoc[Add filenames to result]
+*** xref:plugins/actions/as400command.adoc[AS/400 Command]
 *** xref:plugins/actions/checkdbconnection.adoc[Check Db connections]
 *** xref:plugins/actions/checkfilelocked.adoc[Check Files Locked]
 *** xref:plugins/actions/webserviceavailable.adoc[Check if Webservice is available]
@@ -247,35 +247,35 @@
 *** xref:plugins/actions/copymoveresultfilenames.adoc[Copy Or Move Files from result]
 *** xref:plugins/actions/createfile.adoc[Create File]
 *** xref:plugins/actions/createfolder.adoc[Create Folder]
-*** xref:plugins/actions/dtdvalidator.adoc[DTD Validator]
 *** xref:plugins/actions/pgpdecryptfiles.adoc[Decrypt files with PGP]
 *** xref:plugins/actions/delay.adoc[Delay]
 *** xref:plugins/actions/deletefile.adoc[Delete File]
+*** xref:plugins/actions/deleteresultfilenames.adoc[Delete filenames from result]
 *** xref:plugins/actions/deletefiles.adoc[Delete Files]
 *** xref:plugins/actions/deletefolders.adoc[Delete Folders]
-*** xref:plugins/actions/deleteresultfilenames.adoc[Delete filenames from result]
 *** xref:plugins/actions/msgboxinfo.adoc[Display Msgbox info]
 *** xref:plugins/actions/dostounix.adoc[Dos To Unix]
+*** xref:plugins/actions/dtdvalidator.adoc[DTD Validator]
 *** xref:plugins/actions/dummy.adoc[Dummy]
 *** xref:plugins/actions/pgpencryptfiles.adoc[Encrypt files with PGP]
 *** xref:plugins/actions/repeat-end.adoc[End Repeat]
 *** xref:plugins/actions/evalfilesmetrics.adoc[Evaluate File Metrics]
 *** xref:plugins/actions/evaluatetablecontent.adoc[Evaluate rows number in a table]
-*** xref:plugins/actions/ftpdelete.adoc[FTP delete]
-*** xref:plugins/actions/fileexists.adoc[File Exists]
 *** xref:plugins/actions/filecompare.adoc[File compare]
+*** xref:plugins/actions/fileexists.adoc[File Exists]
 *** xref:plugins/actions/folderisempty.adoc[Folder is Empty]
 *** xref:plugins/actions/folderscompare.adoc[Folders Compare]
-*** xref:plugins/actions/getpop.adoc[Get Mail from POP]
+*** xref:plugins/actions/ftpdelete.adoc[FTP delete]
 *** xref:plugins/actions/ftp.adoc[Get a file with FTP]
 *** xref:plugins/actions/ftpsget.adoc[Get a file with FTPS]
 *** xref:plugins/actions/sftp.adoc[Get a file with SFTP]
+*** xref:plugins/actions/getpop.adoc[Get Mail from POP]
 *** xref:plugins/actions/http.adoc[Http Get]
 *** xref:plugins/actions/eval.adoc[JavaScript]
-*** xref:plugins/actions/mssqlbulkload.adoc[MS SQL Server Bulk Loader]
 *** xref:plugins/actions/mail.adoc[Mail]
 *** xref:plugins/actions/mailvalidator.adoc[Mail Validator]
 *** xref:plugins/actions/movefiles.adoc[Move Files]
+*** xref:plugins/actions/mssqlbulkload.adoc[MS SQL Server Bulk Loader]
 *** xref:plugins/actions/mysqlbulkfile.adoc[MySQL Bulk File]
 *** xref:plugins/actions/mysqlbulkload.adoc[MySQL Bulk Loader]
 *** xref:plugins/actions/pgpfiles.adoc[PGP Files]
@@ -285,13 +285,13 @@
 *** xref:plugins/actions/sftpput.adoc[Put a file with SFTP]
 *** xref:plugins/actions/repeat.adoc[Repeat]
 *** xref:plugins/actions/runpipelinetests.adoc[Run Pipeline Tests]
-*** xref:plugins/actions/sql.adoc[SQL Script Executor]
-*** xref:plugins/actions/sendnagiospassivecheck.adoc[Send Nagios check]
 *** xref:plugins/actions/syslog.adoc[Send information using Syslog]
+*** xref:plugins/actions/sendnagiospassivecheck.adoc[Send Nagios check]
 *** xref:plugins/actions/setvariables.adoc[Set Variables]
 *** xref:plugins/actions/shell.adoc[Shell Action]
 *** xref:plugins/actions/simpleeval.adoc[Simple Evaluation]
 *** xref:plugins/actions/snmptrap.adoc[Snmp Trap]
+*** xref:plugins/actions/sql.adoc[SQL Script Executor]
 *** xref:plugins/actions/start.adoc[Start]
 *** xref:plugins/actions/success.adoc[Success Action]
 *** xref:plugins/actions/tableexists.adoc[Table Exists]
@@ -300,8 +300,8 @@
 *** xref:plugins/actions/unzip.adoc[Unzip]
 *** xref:plugins/actions/ftpsput.adoc[Upload files to FTPS]
 *** xref:plugins/actions/pgpverify.adoc[Verify file signature with PGP]
-*** xref:plugins/actions/waitforsql.adoc[Wait for SQL]
 *** xref:plugins/actions/waitforfile.adoc[Wait for file]
+*** xref:plugins/actions/waitforsql.adoc[Wait for SQL]
 *** xref:plugins/actions/workflow.adoc[Workflow]
 *** xref:plugins/actions/writetofile.adoc[Write to File]
 *** xref:plugins/actions/writetolog.adoc[Write to Log]
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/as400.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/as400.adoc
new file mode 100644
index 0000000..acd7b30
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/as400.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-as400]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/as400/src/main/doc/as400.adoc
+= AS400
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 10.2
+|Hop Dependencies | None
+|Documentation | https://www.ibm.com/support/knowledgecenter/ssw_ibm_i_71/rzahh/javadoc/com/ibm/as400/access/doc-files/JDBCProperties.html[Documentation Link]
+|JDBC Url | jdbc:as400://hostname/default-schema
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/cache.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/cache.adoc
new file mode 100644
index 0000000..becfd09
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/cache.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-cache]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/cache/src/main/doc/cache.adoc
+= Cache
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 3.0.0
+|Hop Dependencies | None
+|Documentation | https://cedocs.intersystems.com/latest/csp/docbook/DocBook.UI.Page.cls?KEY=BGJD[Documentation Link]
+|JDBC Url  | jdbc:Cache://hostname:1972/database
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/clickhouse.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/clickhouse.adoc
new file mode 100644
index 0000000..9486341
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/clickhouse.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-clickhouse]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/clickhouse/src/main/doc/clickhouse.adoc
+= ClickHouse
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Columnar
+|Driver | Included
+|Version Included | 1.4.4
+|Hop Dependencies | None
+|Documentation | https://github.com/blynkkk/clickhouse4j[Documentation Link]
+|JDBC Url | jdbc:clickhouse://<host>:<port>[/<database>]
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/db2.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/db2.adoc
new file mode 100644
index 0000000..a72e7ef
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/db2.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-db2]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/db2/src/main/doc/db2.adoc
+= DB2
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://www.ibm.com/support/pages/db2-jdbc-driver-versions-and-downloads[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www.ibm.com/support/knowledgecenter/SSCQGF_7.2.0.1/com.ibm.IBMDI.doc_7.2.0.1/rg_conn_jdbc.html[Documentation Link]
+|JDBC Url  | jdbc:db2://hostname:port/dbname
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/derby.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/derby.adoc
new file mode 100644
index 0000000..4dda0b9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/derby.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-derby]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/derby/src/main/doc/derby.adoc
+= Apache Derby
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 10.15.1.3
+|Hop Dependencies | None
+|Documentation | https://db.apache.org/derby/derby_downloads.html[Documentation Link]
+|JDBC Url | jdbc:derby:myDB
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/exasol.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/exasol.adoc
new file mode 100644
index 0000000..18d399e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/exasol.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-exasol]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/exasol4/src/main/doc/exasol.adoc
+= Exasol
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 6.2.1
+|Hop Dependencies | None
+|Documentation | https://docs.exasol.com/connect_exasol/drivers/jdbc.htm[Documentation Link]
+|JDBC Url | jdbc:exa:<server>:<port8563>
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/firebird.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/firebird.adoc
new file mode 100644
index 0000000..9f81db9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/firebird.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-firebird]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/firebird/src/main/doc/firebird.adoc
+= firebird
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://firebirdsql.org/en/jdbc-driver/[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://firebirdsql.github.io/jaybird-manual/jaybird_manual.html[Documentation Link]
+|JDBC Url | jdbc:firebirdsql://localhost:3050/c:/database/example.fdb
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/googlebigquery.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/googlebigquery.adoc
new file mode 100644
index 0000000..b76cd01
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/googlebigquery.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-googlebigquery]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/googlebigquery/src/main/doc/googlebigquery.adoc
+= Google BigQuery
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://cloud.google.com/bigquery/providers/simba-drivers/[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www.simba.com/products/BigQuery/doc/JDBC_InstallGuide/content/jdbc/d-intro.htm[Documentation Link]
+|JDBC Url | jdbc:bigquery://[Host]:[Port];ProjectId=[Project];OAuthType=[AuthValue]
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/greenplum.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/greenplum.adoc
new file mode 100644
index 0000000..7e02518
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/greenplum.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-greenplum]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/greenplum/src/main/doc/greenplum.adoc
+= Greenplum
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 5.1.4
+|Hop Dependencies | Postgresql Plugin
+|Documentation | https://gpdb.docs.pivotal.io/590/datadirect/datadirect_jdbc.html[Documentation Link]
+|JDBC Url | jdbc:pivotal:greenplum://host:port;DatabaseName=<name>
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/h2.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/h2.adoc
new file mode 100644
index 0000000..159ac55
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/h2.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-h2]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/h2/src/main/doc/h2.adoc
+= H2
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 1.4.200
+|Hop Dependencies | None
+|Documentation | https://h2database.com/html/main.html[Documentation Link]
+|JDBC Url | jdbc:h2:[file:][<path>]<databaseName>
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/hypersonic.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/hypersonic.adoc
new file mode 100644
index 0000000..3fbac78
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/hypersonic.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-hypersonic]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/hypersonic/src/main/doc/hypersonic.adoc
+= Hypersonic
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 2.5.0
+|Hop Dependencies | None
+|Documentation | http://hsqldb.org/doc/2.0/guide/dbproperties-chapt.html[Documentation Link]
+|JDBC Url | jdbc:hsqldb:hsql://hostname
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/infinidb.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/infinidb.adoc
new file mode 100644
index 0000000..472d4ef
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/infinidb.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-infinidb]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/infinidb/src/main/doc/infinidb.adoc
+= InfiniDB
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Use MySQL Drivers
+|Version Included | None
+|Hop Dependencies | Mysql Database Plugin
+|Documentation | https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference.html[Documentation Link]
+|JDBC Url | jdbc:mysql://hostname:3306/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/infobright.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/infobright.adoc
new file mode 100644
index 0000000..f8131a3
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/infobright.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-infobright]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/infobright/src/main/doc/infobright.adoc
+= Infobright DB
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Use MySQL Drivers
+|Version Included | None
+|Hop Dependencies | Mysql Database Plugin
+|Documentation | https://dev.mysql.com/doc/connector-j/8.0/en/connector-j-reference.html[Documentation Link]
+|JDBC Url | jdbc:mysql://hostname:3306/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/informix.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/informix.adoc
new file mode 100644
index 0000000..8b56533
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/informix.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-informix]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/informix/src/main/doc/informix.adoc
+= informix
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://mvnrepository.com/artifact/com.ibm.informix/jdbc[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www.ibm.com/support/knowledgecenter/en/SSGU8G_12.1.0/com.ibm.jdbc_pg.doc/jdbc.htm[Documentation Link]
+|JDBC Url | jdbc:informix-sqli://hostname:1533/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/ingres.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/ingres.adoc
new file mode 100644
index 0000000..e670d9d
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/ingres.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-ingres]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/ingres/src/main/doc/ingres.adoc
+= ingres
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://esd.actian.com/product/drivers/JDBC/java/JDBC[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://docs.actian.com/ingres/11.0/index.html#page/Connectivity%2FJDBC_Driver_and_Data_Source_Classes.htm%23[Documentation Link]
+|JDBC Url | jdbc:ingres://host:port{,port}{;host:port{,port}}/db{;attr=value}
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/interbase.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/interbase.adoc
new file mode 100644
index 0000000..2599817
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/interbase.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-interbase]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/interbase/src/main/doc/interbase.adoc
+= interbase
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included in Database installation
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | http://docwiki.embarcadero.com/InterBase/2020/en/Programming_with_JDBC[Documentation Link]
+|JDBC Url | jdbc:interbase://hostname:3050/path/to/database.ib
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/kingbasees.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/kingbasees.adoc
new file mode 100644
index 0000000..d10e434
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/kingbasees.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-kingbasees]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/kingbasees/src/main/doc/kingbasees.adoc
+= kingbase es
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | 
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www.kingbase.com.cn/zhuanti/kes/html/jdbc.html[Documentation Link]
+|JDBC Url | jdbc:kingbase8://host:port/database
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/mariadb.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/mariadb.adoc
new file mode 100644
index 0000000..c2d44c9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/mariadb.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-mariadb]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/mariadb/src/main/doc/mariadb.adoc
+= Mariadb
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://mariadb.com/kb/en/about-mariadb-connector-j/[Driver Link]
+|Version Included | None
+|Hop Dependencies | Mysql Database plugin
+|Documentation | https://mariadb.com/kb/en/about-mariadb-connector-j/[Documentation Link]
+|JDBC Url | jdbc:mariadb://hostname:3306/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/monetdb.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/monetdb.adoc
new file mode 100644
index 0000000..a32dba9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/monetdb.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-monetdb]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/monetdb/src/main/doc/monetdb.adoc
+= monetdb
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Column Store
+|Driver | Included
+|Version Included | 2.25
+|Hop Dependencies | None
+|Documentation | https://www.monetdb.org/Documentation/Manuals/SQLreference/Programming/JDBC[Documentation Link]
+|JDBC Url |  jdbc:monetdb://hostname/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/mssql.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/mssql.adoc
new file mode 100644
index 0000000..63b3ba3
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/mssql.adoc
@@ -0,0 +1,34 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-mssql]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/mssql/src/main/doc/mssql.adoc
+= MS SqlServer
+
+This database type uses the JTDS driver. When creating new database connections it is recommended to use the native connection type.
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://sourceforge.net/projects/jtds/files/jtds/[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | http://jtds.sourceforge.net/faq.html[Documentation Link]
+|JDBC Url | jdbc:jtds:sqlserver://<server>[:<port>][/<database>][;<property>=<value>[;...]]
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/mssqlnative.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/mssqlnative.adoc
new file mode 100644
index 0000000..66451f0
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/mssqlnative.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-mssqlnative]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/mssqlnative/src/main/doc/mssqlnative.adoc
+= MS SqlServer (Native)
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://docs.microsoft.com/en-us/sql/connect/jdbc/download-microsoft-jdbc-driver-for-sql-server?view=sql-server-ver15[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://docs.microsoft.com/en-us/sql/connect/jdbc/setting-the-connection-properties?view=sql-server-ver15[Documentation Link]
+|JDBC Url | jdbc:sqlserver://[serverName[\instanceName][:portNumber]][;property=value[;property=value]]
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/mysql.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/mysql.adoc
new file mode 100644
index 0000000..376eec3
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/mysql.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-mysql]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/mysql/src/main/doc/mysql.adoc
+= MySql
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://dev.mysql.com/downloads/connector/j/[Driver Link] (Use Platform Independent)
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://dev.mysql.com/doc/connector-j/8.0/en/[Documentation Link]
+|JDBC Url | jdbc:mysql://hostname:3306/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/netezza.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/netezza.adoc
new file mode 100644
index 0000000..117dd22
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/netezza.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-netezza]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/netezza/src/main/doc/netezza.adoc
+= Netezza
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included in database installation
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www.ibm.com/support/knowledgecenter/SSULQD_7.2.1/com.ibm.nz.datacon.doc/c_datacon_installing_configuring_jdbc.html[Documentation Link]
+|JDBC Url | jdbc:netezza://hostname:5490/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/oracle.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/oracle.adoc
new file mode 100644
index 0000000..afc7aa9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/oracle.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-oracle]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/oracle/src/main/doc/oracle.adoc
+= Oracle
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://www.oracle.com/database/technologies/appdev/jdbc-downloads.html[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://docs.oracle.com/cd/E11882_01/java.112/e16548/toc.htm[Documentation Link]
+|JDBC Url | jdbc:oracle:thin:@hostname:portNumber:databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/oraclerdb.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/oraclerdb.adoc
new file mode 100644
index 0000000..973729b
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/oraclerdb.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-oraclerdb]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/oraclerdb/src/main/doc/oraclerdb.adoc
+= Oracle RDB
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://www.oracle.com/database/technologies/rdb-related-products-downloads.html[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www.oracle.com/technetwork/database/database-technologies/rdb/documentation/rdbjdbc-ug-725-129654.pdf[Documentation Link]
+|JDBC Url | jdbc:rdbThin://<node>:<port>/<database_specification>
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/postgresql.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/postgresql.adoc
new file mode 100644
index 0000000..b9a4fb4
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/postgresql.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-postgresql]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/postgresql/src/main/doc/postgresql.adoc
+= PostgreSQL
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 42.2.8
+|Hop Dependencies | None
+|Documentation | https://jdbc.postgresql.org/documentation/head/index.html[Documentation Link]
+|JDBC Url  | jdbc:postgresql://host:port/database
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/redshift.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/redshift.adoc
new file mode 100644
index 0000000..4eb1bbd
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/redshift.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-redshift]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/redshift/src/main/doc/redshift.adoc
+= Redshift
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 1.2.27.1051
+|Hop Dependencies | None
+|Documentation | https://docs.aws.amazon.com/redshift/latest/mgmt/configure-jdbc-connection.html[Documentation Link]
+|JDBC Url | jdbc:redshift://endpoint:port/database
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/sapdb.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/sapdb.adoc
new file mode 100644
index 0000000..731c60b
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/sapdb.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-sapdb]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/sapdb/src/main/doc/sapdb.adoc
+= MaxDB (SAP DB)
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included in Database installation
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://help.sap.com/saphelp_tm93/helpdata/en/37/5f6b6e966242aead8025bdc5296489/frameset.htm[Documentation Link]
+|JDBC Url | jdbc:sapdb://<database_computer>[:<port>]/<database_name>
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/snowflake.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/snowflake.adoc
new file mode 100644
index 0000000..9a62c6a
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/snowflake.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-snowflake]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/snowflake/src/main/doc/snowflake.adoc
+= Snowflake
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 3.11.1
+|Hop Dependencies | None
+|Documentation | https://docs.snowflake.net/manuals/user-guide/jdbc-configure.html[Documentation Link]
+|JDBC Url | jdbc:snowflake://<account_name>.snowflakecomputing.com/?<connection_params>
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/sqlbase.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/sqlbase.adoc
new file mode 100644
index 0000000..9c64aa2
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/sqlbase.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-sqlbase]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/sqlbqse/src/main/doc/sqlbqse.adoc
+= Gupta SQLBase
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://knowledge.opentext.com/knowledge/cs.dll/kcs/kb[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://knowledge.opentext.com/knowledge/cs.dll/kcs/kb[Documentation Link]
+|JDBC Url | jdbc:sqlbase://hostname:port/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/sqlite.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/sqlite.adoc
new file mode 100644
index 0000000..74bc311
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/sqlite.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-sqlite]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/sqlite/src/main/doc/sqlite.adoc
+= SQLite
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 3.28.0
+|Hop Dependencies | None
+|Documentation | https://www.sqlitetutorial.net/sqlite-java/sqlite-jdbc-driver/[Documentation Link]
+|JDBC Url | jdbc:sqlite:sample.db
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/sybase.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/sybase.adoc
new file mode 100644
index 0000000..5a6d432
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/sybase.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-sybase]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/sybase/src/main/doc/sybase.adoc
+= Sybase
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 1.3.1
+|Hop Dependencies | None
+|Documentation | http://jtds.sourceforge.net/faq.html[Documentation Link]
+|JDBC Url | jdbc:jtds:sybase://<server>[:<port>][/<database>][;<property>=<value>[;...]]
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/sybaseiq.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/sybaseiq.adoc
new file mode 100644
index 0000000..5c6e00c
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/sybaseiq.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-sybaseiq]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/sybaseiq/src/main/doc/sybaseiq.adoc
+= Sybase IQ
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Column Oriented
+|Driver | Included in database installation
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | http://infocenter.sybase.com/help/index.jsp?topic=/com.sybase.infocenter.dc01776.1600/doc/html/san1357754910584.html[Documentation Link]
+|JDBC Url | jdbc:sybase:Tds:localhost:2638
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/teradata.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/teradata.adoc
new file mode 100644
index 0000000..9209a14
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/teradata.adoc
@@ -0,0 +1,31 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-teradata]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/teradata/src/main/doc/teradata.adoc
+= Teradata
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://downloads.teradata.com/download/connectivity/jdbc-driver[Driver Link]
+|Hop Dependencies | None
+|Documentation | https://teradata-docs.s3.amazonaws.com/doc/connectivity/jdbc/reference/current/frameset.html[Documentation Link]
+|JDBC Url | jdbc:teradata://Hostname
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/universe.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/universe.adoc
new file mode 100644
index 0000000..084243f
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/universe.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-universe]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/universe/src/main/doc/universe.adoc
+= IBM Universe
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included in database installation
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www-05.ibm.com/e-business/linkweb/publications/servlet/pbi.wss?CTY=US&FNC=SRX&PBL=G251-1210-00#[Documentation Link]
+|JDBC Url | jdbc:ibmu2://localhost
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/vectorwise.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/vectorwise.adoc
new file mode 100644
index 0000000..99fda9c
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/vectorwise.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-vectorwise]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/vectorwise/src/main/doc/vectorwise.adoc
+= Vectorwise
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | Included
+|Version Included | 10.0-4.0.5
+|Hop Dependencies | None
+|Documentation | https://docs.actian.com/ingres/11.0/index.html#page/Connectivity%2FJDBC_Driver_and_Data_Source_Classes.htm%23[Documentation Link]
+|JDBC Url | jdbc:ingres://host:port{,port}{;host:port{,port}}/db{;attr=value}
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/database/databases/vertica.adoc b/hop-user-manual/modules/ROOT/pages/database/databases/vertica.adoc
new file mode 100644
index 0000000..53ccc6c
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/database/databases/vertica.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+[[database-plugins-vertica]]
+:documentationPath: /database/databases/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/database/databases/vertica/src/main/doc/vertica.adoc
+= Vertica
+
+[width="90%", cols="2*", options="header"]
+|===
+| Option | Info
+|Type | Relational
+|Driver | https://www.vertica.com/download/vertica/client-drivers/[Driver Link]
+|Version Included | None
+|Hop Dependencies | None
+|Documentation | https://www.vertica.com/docs/9.2.x/HTML/Content/Authoring/ConnectingToVertica/ClientJDBC/JDBCConnectionProperties.htm[Documentation Link]
+|JDBC Url | jdbc:vertica://VerticaHost:portNumber/databaseName
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/abort.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/abort.adoc
new file mode 100644
index 0000000..396bf1a
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/abort.adoc
@@ -0,0 +1,35 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/abort/src/main/doc/abort.adoc
+= Abort
+
+== Description
+
+This transform type allows you to abort a pipeline upon seeing input. Its main use is in error handling. For example, you can use this transform so that a pipeline can be aborted after x number of rows flow over an error hop.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Abort threshold|The threshold of number of rows after which to abort the pipeline. E.g. If threshold is 0, the abort transform will abort after seeing the first row. If threshold is 5, the abort transform will abort after seeing the sixth row.
+|Abort message|The message to put in the log upon aborting. If not filled in a default message will be used.
+|Always log|Always log the rows processed by the Abort transform. This allows the rows to be logged although the log level of the pipeline would normally not do it. This way you can always see in the log which rows caused the pipeline to abort.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/addsequence.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/addsequence.adoc
new file mode 100644
index 0000000..435407a
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/addsequence.adoc
@@ -0,0 +1,43 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/addsequence/src/main/doc/addsequence.adoc
+= Add Sequence
+
+== Description
+
+The Add sequence transform adds a sequence to the Hop stream. A sequence is an ever-changing integer value with a specific start and increment value. You can either use a database sequence to determine the value of the sequence, or have it generated by Hop.
+Hop-generated sequence values are unique only when used in the same pipeline. They are not stored, so the sequence starts back at the same starting value each time the pipeline is launched.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|The name of this transform as it appears in the pipeline workspace. This name must be unique within a single pipeline.
+|Name of value|Name of the new sequence value that is added to the stream.
+|Use DB to generate the sequence|Enable if you want the sequence to be driven by a database sequence, then set these parameters: Connection name, Schema name (optional), Sequence name.
+|Connection name|The name of the connection on which the database sequence resides.
+|Schema name (optional)|The table's schema name.
+|Sequence name|The name of the database sequence.
+|Use a pipeline counter to generate the sequence|Enable if you want the sequence to be generated by Hop, then set these parameters: Counter name (optional), Start at, Increment by, Maximum value.
+|Counter name (optional)|If multiple transforms in a pipeline generate the same value name, this option enables you to specify the name of the counter to associate with. Avoids forcing unique sequencing across multiple transforms.
+|Start at|The value to begin the sequence with.
+|Increment by|The amount by which the sequence increases or decreases.
+|Maximum value|The value after which the sequence returns to the Start At value.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/addxml.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/addxml.adoc
new file mode 100644
index 0000000..45346cb
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/addxml.adoc
@@ -0,0 +1,103 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/xml/src/main/doc/addxml.adoc
+= Add XML
+
+== Description
+
+The Add XML transform allows you to encode the content of a number of fields in a row in XML. This XML is added to the row in the form of a String field.
+
+
+== Options
+
+=== Content Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Encoding|The encoding to use; this encoding is specified in the header of the XML file
+|Output Value|The name of the new field that contains the XML
+|Root XML element|The name of the root element in the generated element
+|Omit XML header|Enable to not include the XML header in the output.
+|Omit null values from the XML result|Do not add elements or attributes with null values. This is often used to limit the size of the target XML. 
+|===
+
+=== Fields Tab
+
+The Fields tab is where you configure the output fields and their formats. The table below describes each of the available properties for a field.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Fieldname|Name of the field
+|Element name|The name of the element in the XML file to use
+|Type|Type of the field can be either String, Date, or Number
+|Format|Format mask with which to convert data; see Number Formats for a complete description of format specifiers
+|Length|Output string is padded to this length if it is specified
+|Precision|The precision to use
+|Currency|Symbol used to represent currencies like $10,000.00 or E5.000,00
+|Decimal|A decimal point can be a "." (10,000.00) or "," (5.000,00)
+|Grouping|A grouping can be a "," (10,000.00) or "." (5.000,00)
+|Null|The string to use in case the field value is null.
+|Attribute|Make this an attribute (N means : element)
+|Attribute parent name|You can specify the name of the parent element to add the attribute to if previous parameter attribute is set to Y.
+If no parent name is specified, the attribute is set in the parent element. 
+|===
+
+== Use case
+
+Below is data that comes in a variety of classes and needs to be stored as XML in a database. You want to turn the raw data into the database layout below.
+
+=== Raw data
+
+[width="90%", options="header"]
+|===
+|Shape|Colour|Id|X|Y|Radius
+|circle|blue|1|3|5|5
+|circle|red|2|1|3|5
+|circle|blue|5|5|9|5
+|circle|blue|6|8|2|5
+|circle|red|7|9|7|5 
+|===
+
+[width="90%", options="header"]
+|===
+|Shape|Colour|Id|X|Y|Length|Width
+|rectangle|blue|3|3|1|6|4
+|rectangle|red|6|2|4|6|4
+|rectangle|blue|10|8|2|6|4
+|rectangle|red|12|7|8|6|4
+|rectangle|blue|14|5|2|6|4 
+|===
+
+=== Output Sample
+
+[width="90%", options="header"]
+|===
+|Id|X|Y|Class data
+|3|4|7|<SHAPE type="circle"><COLOUR>blue</COLOUR><RADIUS> 5</RADIUS></SHAPE>
+|1|6|3|<SHAPE type="rectangle"><COLOUR>blue</COLOUR><WIDTH> 4</WIDTH><LENGTH> 6</LENGTH></SHAPE>
+|2|8|8|<SHAPE type="rectangle"><COLOUR>blue</COLOUR><WIDTH> 4</WIDTH><LENGTH>6</LENGTH></SHAPE>
+|5|5|2|<SHAPE type="circle"><COLOUR>blue</COLOUR><RADIUS> 5</RADIUS></SHAPE> 
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/analyticquery.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/analyticquery.adoc
new file mode 100644
index 0000000..0834d8e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/analyticquery.adoc
@@ -0,0 +1,105 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/analyticquery/src/main/doc/analyticquery.adoc
+= Analytic Query
+
+== Description
+
+This transform allows you to peek forward and backwards across rows. Examples of common use cases are:
+
+* Calculate the "time between orders" by ordering rows by order date, and LAGing 1 row back to get previous order time.
+* Calculate the "duration" of a web page view by LEADing 1 row ahead and determining how many seconds the user was on this page.
+
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name| The name of this transform as it appears in the pipeline workspace.
+|Group fields table|Specify the fields you want to group. Click Get Fields to add all fields from the input stream(s). The transform will do no additional sorting, so in addition to the grouping identified (for example CUSTOMER_ID) here you must also have the data sorted (for example ORDER_DATE).
+|Analytic Functions table|Specify the analytic functions to be solved.
+|New Field Name|The name you want this new field to be named on the stream (for example PREV_ORDER_DATE)
+|Subject|The existing field to grab (for example ORDER_DATE)
+|Type
+a|Set the type of analytic function:
+
+* Lead - Go forward N rows and get the value of Subject
+
+* Lag - Go backward N rows and get the value of Subject
+|N|The number of rows to offset (backwards or forwards) 
+|===
+
+## Group field examples
+
+While it is not mandatory to specify a group, it can be useful for certain cases. If you create a group (made up of one or more fields), then the "lead forward / lag backward" operations are made only within each group. For example, suppose you have this:
+
+====
+[source,bash]
+----
+X   , Y
+--------
+aaa , 1
+aaa , 2
+aaa , 3
+bbb , 4
+bbb , 5
+bbb , 6
+----
+====
+
+And you want to create a field named Z, with the Y value in the previous row.
+
+If you only care about the Y field, you don't need to group. And you will have the following result:
+
+====
+[source,bash]
+----
+X   , Y , Z
+------------
+aaa , 1 , <null>
+aaa , 2 , 1
+aaa , 3 , 2
+bbb , 4 , 3
+bbb , 5 , 4
+bbb , 6 , 5
+----
+====
+
+But if you don't want to mix the values for aaa and bbb, you can group by the X field, and you will have this:
+
+====
+[source,bash]
+----
+X   , Y , Z
+------------
+aaa , 1 , <null>
+aaa , 2 , 1
+aaa , 3 , 2
+bbb , 4 , <null>
+bbb , 5 , 4
+bbb , 6 , 5
+----
+====
+
+Thus, by grouping (provided the input is sorted according to your grouping), you can be assured that lead or lag operations will not return row values outside of the defined group.
+
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/append.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/append.adoc
new file mode 100644
index 0000000..98c0a63
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/append.adoc
@@ -0,0 +1,42 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/append/src/main/doc/append.adoc
+= Append Streams
+
+== Description
+
+The "Append streams" transform reads the data from two transforms, only processing the second stream after the first is finished.
+As always, the row layout for the input data coming from both transforms has to be identical: the same row lengths, the same data types, the same fields at the same field indexes in the row.
+
+*Important*: If you don't care about the order in which the output rows occur, you can use any transform to create a union of 2 or more data streams.
+
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. Note: This name has to be unique in a single pipeline.
+|Head hop|The name of the transform that will be read from first.
+|Tail hop|The name of the transform that will be read from last. 
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beambigqueryinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beambigqueryinput.adoc
new file mode 100644
index 0000000..fcfa937
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beambigqueryinput.adoc
@@ -0,0 +1,40 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beambigqueryinput.adoc
+= Beam BigQuery Input
+
+== Description
+
+The Beam BigQuery Input transform can be used to input data from link:https://cloud.google.com/bigquery[Google Cloud BigQuery] using the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Project ID|The Google Cloud project.
+|Data set ID|The BigQuery dataset ID.
+|Table ID|The BigQuery table ID.
+|Query|The input query, blank means everything from the table.
+|Return fields selection|A list of result fields.
+|BQ Field name|The field name in the BigQuery table.
+|Rename to... (optional)|The name to be given to a column.
+|Hop data type|The field data type.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beambigqueryoutput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beambigqueryoutput.adoc
new file mode 100644
index 0000000..6964b88
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beambigqueryoutput.adoc
@@ -0,0 +1,38 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beambigqueryoutput.adoc
+= Beam BigQuery Output
+
+== Description
+
+The Beam BigQuery Output transform can be used to output data to link:https://cloud.google.com/bigquery[BigQuery] using the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Project ID|The Google Cloud Platform project.
+|Data set ID|The BigQuery dataset ID.
+|Table ID|The BigQuery table ID.
+|Create table if needed|Whether or not to create the table if it does not exist.
+|Truncate table|Whether or not to truncate the table before inserting.
+|Fail if the table is not empty|Throws an error if the table is not empty.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamconsume.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamconsume.adoc
new file mode 100644
index 0000000..94a9b6a
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamconsume.adoc
@@ -0,0 +1,46 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beamconsume.adoc
+= Beam Kafka Consume
+
+== Description
+
+The Beam Kafka Consume transform link:https://kafka.apache.org/23/javadoc/index.html?org/apache/kafka/clients/consumer/KafkaConsumer.html[consumes] records from a Kafka cluster using the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Bootstrap servers|A comma separated list of hosts which are Kafka brokers in a "bootstrap" Kafka cluster.
+|Topics to consume|A comma separated list of topics to consume.
+|Group ID|Specifies the ID of a consumer group a Kafka consumer belongs to.
+|The name of the key output field|The result key field.
+|The name of the message output field|The result message field.
+|Use processing time|The time when the record is processed by Beam.
+|Use log append time|The time when the record is appended by the broker.
+|Use create time|The time when the producer record is created.
+|Restrict read to committed messages|Restricts reading to committed records only.
+|Allow offsets to be committed back|Allows committing offsets as to mark an offset as consumed.
+|Configuration options|A list of configuration parameters.
+|Parameter|A configuration parameter.
+|Value|The parameter value.
+|Type|The value data type.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beaminput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beaminput.adoc
new file mode 100644
index 0000000..6bdab30
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beaminput.adoc
@@ -0,0 +1,56 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beaminput.adoc
+= Beam Input
+
+== Description
+
+The Beam Input transform reads files using a file definition with the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Input location|The location from which to read a file.
+|File definition to use|The file definition to use stored in a metastore.
+|Edit|Edits an existing file definition.
+|New|Creates a new file definition.
+|Manage|Opens the MetaStore Explorer.
+|===
+
+== File Definition
+
+A file definition can be used to define a file structure definition.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|The file definition name.
+|Description|The file definition description.
+|Field separator|The character separating the fields.
+|Field enclosure|The character enclosing fields.
+|Field definitions|A list of fields.
+|Field name|The field name.
+|Type|The field data type.
+|Format|The field format.
+|Length|The field length.
+|Precision|The field precision.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamoutput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamoutput.adoc
new file mode 100644
index 0000000..73e1809
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamoutput.adoc
@@ -0,0 +1,59 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beamoutput.adoc
+= Beam Output
+
+== Description
+
+The Beam Output transform writes files using a file definition with the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Output location|The output location.
+|File prefix|Text which has to precede the file name.
+|File suffix|Text which has to succeed the file name.
+|Windowed writes?|Writes a file per window. Use together with the Beam Window transform.
+|File definition to use|The file definition to use stored in a metastore.
+|Edit|Edits an existing file definition.
+|New|Creates a new file definition.
+|Manage|Opens the MetaStore Explorer.
+|===
+
+== File Definition
+
+A file definition can be used to define a file structure definition.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|The file definition name.
+|Description|The file definition description.
+|Field separator|The character separating the fields.
+|Field enclosure|The character enclosing fields.
+|Field definitions|A list of fields.
+|Field name|The field name.
+|Type|The field data type.
+|Format|The field format.
+|Length|The field length.
+|Precision|The field precision.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamproduce.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamproduce.adoc
new file mode 100644
index 0000000..5216e44
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamproduce.adoc
@@ -0,0 +1,36 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beamproduce.adoc
+= Beam Kafka Produce
+
+== Description
+
+The Beam Kafka Produce transform link:https://kafka.apache.org/25/javadoc/index.html?org/apache/kafka/clients/producer/KafkaProducer.html[publishes] records to a Kafka cluster using the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Bootstrap servers|A comma separated list of hosts which are Kafka brokers in a "bootstrap" Kafka cluster.
+|The topics|The topics to publish to.
+|The field to use as key|The record key.
+|The field to use as message|The record message.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beampublisher.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beampublisher.adoc
new file mode 100644
index 0000000..f29e079
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beampublisher.adoc
@@ -0,0 +1,38 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beampublisher.adoc
+= Beam GCP Pub/Sub : Publish
+
+== Description
+
+The Beam GCP Pub/Sub : Publish transform publishes messages to a link:https://cloud.google.com/pubsub[Google Cloud Pub/Sub] topic using the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Topic to publish to|The Google Cloud Pub/Sub topic.
+|The type of message to publish a|
+ 
+* String
+* PubsubMessage
+|The field to use as message|The field which contains the message.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamsubscriber.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamsubscriber.adoc
new file mode 100644
index 0000000..3b6f0e8
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamsubscriber.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beamsubscriber.adoc
+= Beam GCP Pub/Sub : Subscribe
+
+== Description
+
+The Beam GCP Pub/Sub : Subscribe transform gets messages from a link:https://cloud.google.com/pubsub[Google Cloud Pub/Sub] topic using a subscription with the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Subscription name|Name of the Google Cloud Pub/Sub subscription.
+|Topic to subscribe to|The Google Cloud Pub/Sub topic.
+|The type of message to read a|
+ 
+* String
+* PubsubMessage
+|The output field name|The field containing the result message.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamtimestamp.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamtimestamp.adoc
new file mode 100644
index 0000000..8b6cd46
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamtimestamp.adoc
@@ -0,0 +1,34 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beamtimestamp.adoc
+= Beam Timestamp
+
+== Description
+
+The Beam Timestamp transform adds a custom timestamp using the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|The time field to use. Uses system time if blank|The field containing the timestamp; system time will be used if this is left empty.
+|Get timestamp from stream?|Returns the event time field if it is defined within the stream.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamwindow.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamwindow.adoc
new file mode 100644
index 0000000..d84fc82
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/beamwindow.adoc
@@ -0,0 +1,63 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/plugins/engines/beam/src/main/doc/beamwindow.adoc
+= Beam Window
+
+== Description
+
+The Beam Window transform adds event-time-based window functions using the Beam execution engine.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform, this name has to be unique in a single pipeline.
+|Window type a|
+
+* Fixed
+* Sliding
+* Session
+* Global
+|Window size (duration in seconds)|Sets the window duration size in seconds, default 60.
+|Every x seconds (Sliding windows)|Sets the slide window duration in seconds.
+|Window start field|The field containing the window start time.
+|Window end field|The field containing the window end time.
+|Window max field|The field containing the max duration between events.
+|===
+
+== Window Types
+
+=== Fixed
+
+Fixed or tumbling windows are used to repeatedly segment data into distinct time segments and do not overlap. Events cannot belong to more than one window.
+
+=== Sliding
+
+Sliding windows produce an output only when an event occurs and continuously move forward. Every window will have at least one event and can overlap. Events can belong to more than one window.
+
+=== Session
+
+Session windows group events which arrive at similar times and filter out periods of time when there is no data.
+
+The window begins when the first event occurs and extends to include new events within a specified timeout. If events keep occurring the window will keep extending until maximum duration is reached.
+
+=== Global
+
+Global windowing is the default in Beam and ignores event time (spanning all of event time) and uses triggers to provide snapshots of that window.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/blockingtransform.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/blockingtransform.adoc
new file mode 100644
index 0000000..3cbe23f
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/blockingtransform.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/blockingtransform/src/main/doc/blockingtransform.adoc
+= Blocking transform
+
+== Description
+
+The Blocking transform blocks all output until the very last row is received from the previous transform.
+
+At that point, the last row is sent to the next transform or the complete input is sent off to the next transform. Use the Blocking transform for triggering plugins, stored procedures, Java scripts, ... or for synchronization purposes.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Pass all rows?|Determines whether to pass one row or all rows
+|Spool directory|The directory in which the temporary files are stored if needed; the default is the standard temporary directory for the system
+|Spool-file prefix|Choose a recognizable prefix to identify the files when they appear in the temp directory
+|Cache size|The more rows you can store in memory, the faster the transform works
+|Compress spool files?|Compresses temporary files when they are needed 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/blockuntiltransformsfinish.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/blockuntiltransformsfinish.adoc
new file mode 100644
index 0000000..504d520
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/blockuntiltransformsfinish.adoc
@@ -0,0 +1,43 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/blockuntiltransformsfinish/src/main/doc/blockuntiltransformsfinish.adoc
+= Blocking until transforms finish
+
+== Description
+
+This transform simply waits until all the transform copies that are specified in the dialog have finished. 
+
+You can use it to avoid the natural concurrency (parallelism) that exists between pipeline transform copies.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Watch the following transforms|Use this grid to specify the transforms to wait for.
+|Get transforms|Push this button to auto-fill the "Watch the following transforms" grid with all transforms available in the pipeline.
+|===
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|The name of the transform to wait for.
+|CopyNr|The (0-based) copy number of the transform. If the named transform has an explicit setting for "Change number of copies to start", and you want to wait for all copies to finish, you'll need to enter one row in the grid for each copy, and use this column to specify which copy of the transform to wait for. For the default number of copies (1), the CopyNr is always 0.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/calculator.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/calculator.adoc
new file mode 100644
index 0000000..d710e18
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/calculator.adoc
@@ -0,0 +1,147 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/calculator/src/main/doc/calculator.adoc
+= Calculator
+
+== Description
+
+This calculator transform provides you with predefined functions that can be executed on input field values.
+
+*_Note:_* The execution speed of the Calculator is far better than the speed provided by custom scripts (JavaScript).
+
+Besides the arguments (Field A, Field B and Field C) you must also specify the return type of the function. You can also choose to remove the field from the result (output) after all values are calculated; this is useful for removing temporary values.
+
+== Options
+
+[width="90%", options=header]
+|===
+|Function|Description
+|Set field to constant A|
+|Create a copy of field A|
+|A + B|A plus B.
+|A - B|A minus B.
+|A * B|A multiplied by B.
+|A / B|A divided by B.
+|A * A|The square of A.
+|SQRT( A )|The square root of A.
+|100 * A / B|Percentage of A in B.
+|A - ( A * B / 100 )|Subtract B% of A.
+|A + ( A * B / 100 )|Add B% to A.
+|A + B *C|Add A and B times C.
+|SQRT( A*A + B*B )|Calculate the square root of A&#178; + B&#178;.
+|ROUND( A )|Returns the closest Integer to the argument. The result is rounded to an Integer by adding 1/2, taking the floor of the result, and casting the result to type int. In other words, the result is equal to the value of the expression: floor (a + 0.5).
+In case you need the rounding method "Round half to even", use the following method ROUND( A, B ) with no decimals (B=0).
+|ROUND( A, B )|Round A to the nearest even number with B decimals. The used rounding method is "Round half to even", it is also called unbiased rounding, convergent rounding, statistician's rounding, Dutch rounding, Gaussian rounding, odd-even rounding, bankers' rounding or broken rounding, and is widely used in bookkeeping. This is the default rounding mode used in IEEE 754 computing functions and operators. In Germany it is often called "Mathematisches Runden".
+|STDROUND( A )|Round A to the nearest integer. The used rounding method is "Round half away from zero", it is also called standard or common rounding. In Germany it is known as "kaufmännische Rundung" (and defined in DIN 1333).
+|STDROUND( A, B )|Same rounding method used as in STDROUND (A) but with B decimals.
+|CEIL( A )|The ceiling function maps a number to the smallest following integer.
+|FLOOR( A )|The floor function maps a number to the largest previous integer.
+|NVL( A, B )|If A is not NULL, return A, else B. Note that sometimes your variable won't be null but an empty string.
+|Date A + B days|Add B days to Date field A.
+Note: Only integer values for B are supported. If you need non-integer calculations, please add a second calculation with hours.
+|Year of date A|Calculate the year of date A.
+|Month of date A|Calculate the month number of date A.
+|Day of year of date A|Calculate the day of year (1-365).
+|Day of month of date A|Calculate the day of month (1-31).
+|Day of week of date A|Calculate the day of week (1-7).
+|Week of year of date A|Calculate the week of year (1-54).
+|ISO8601 Week of year of date A|Calculate the week of the year ISO8601 style (1-53).
+|ISO8601 Year of date A|Calculate the year ISO8601 style.
+|Byte to hex encode of string A|Encode bytes in a string to a hexadecimal representation.
+|Hex encode of string A|Encode a string in its own hexadecimal representation.
+|Char to hex encode of string A|Encode characters in a string to a hexadecimal representation.
+|Hex decode of string A|Decode a string from its hexadecimal representation (add a leading 0 when A is of odd length).
+|Checksum of a file A using CRC-32|Calculate the checksum of a file using CRC-32.
+|Checksum of a file A using Adler-32|Calculate the checksum of a file using Adler-32.
+|Checksum of a file A using MD5|Calculate the checksum of a file using MD5.
+|Checksum of a file A using SHA-1|Calculate the checksum of a file using SHA-1.
+|Levenshtein Distance (Source A and Target B)|Calculates the Levenshtein Distance: http://en.wikipedia.org/wiki/Levenshtein_distance
+|Metaphone of A (Phonetics)|Calculates the metaphone of A:  http://en.wikipedia.org/wiki/Metaphone
+|Double metaphone of A|Calculates the double metaphone of A: http://en.wikipedia.org/wiki/Double_Metaphone
+|Absolute value ABS(A)|Calculates the Absolute value of A.
+|Remove time from a date A|Removes time value of A. Note: Daylight Savings Time (DST) changes in Sao Paulo and some other parts of Brazil at midnight 0:00. This makes it impossible to set the time to 0:00 at the specific date, when the DST changes from 0:00 to 1:00 am. So, there is one date in one year in these regions where this function will fail with an "IllegalArgumentException: HOUR_OF_DAY: 0 -> 1". It is not an issue for Europe, the US and other regions where the time changes at 1: [...]
+|Date A - Date B (in days)|Calculates difference, in days, between A date field and B date field.
+|A + B + C|A plus B plus C.
+|First letter of each word of a string A in capital|Transforms the first letter of each word within a string.
+|UpperCase of a string A|Transforms a string to uppercase.
+|LowerCase of a string A|Transforms a string to lowercase.
+|Mask XML content from string A|Escape XML content; replace characters with &values.
+|Protect (CDATA) XML content from string A|Indicates an XML string is general character data, rather than non-character data or character data with a more specific, limited structure. The given string will be enclosed into <![CDATA[String]]>.
+|Remove CR from a string A|Removes carriage returns from a string.
+|Remove LF from a string A|Removes linefeeds from a string.
+|Remove CRLF from a string A|Removes carriage returns/linefeeds from a string.
+|Remove TAB from a string A|Removes tab characters from a string.
+|Return only digits from string A|Outputs only digits (0-9) from a string.
+|Remove digits from string A|Removes all digits (0-9) from a string.
+|Return the length of a string A|Returns the length of the string.
+|Load file content in binary|Loads the content of the given file (in field A) to a binary data type (e.g. pictures).
+|Add time B to date A|Add the time to a date, returns date and time as one value.
+|Quarter of date A|Returns the quarter (1 to 4) of the date.
+|variable substitution in string A|Substitute variables within a string.
+|Unescape XML content|Unescape XML content from the string.
+|Escape HTML content|Escape HTML within the string.
+|Unescape HTML content|Unescape HTML within the string.
+|Escape SQL content|Escapes the characters in a String to be suitable to pass to an SQL query.
+|Date A - Date B (working days)|Calculates the difference between Date field A and Date field B (only working days Mon-Fri).
+|Date A + B Months|Add B months to Date field A.
+Note: Only integer values for B are supported. If you need non-integer calculations, please add a second calculation with days.
+|Check if an XML file A is well formed|Validates XML file input.
+|Check if an XML string A is well formed|Validates XML string input.
+|Get encoding of file A|Guess the best encoding (UTF-8) for the given file.
+|Dameraulevenshtein distance between String A and String B|Calculates Dameraulevenshtein distance between strings: http://en.wikipedia.org/wiki/Damerau%E2%80%93Levenshtein_distance
+|NeedlemanWunsch distance between String A and String B|Calculates NeedlemanWunsch distance between strings: http://en.wikipedia.org/wiki/Needleman%E2%80%93Wunsch_algorithm
+|Jaro similitude between String A and String B|Returns the Jaro similarity coefficient between two strings.
+|JaroWinkler similitude between String A and String B|Returns the Jaro similarity coefficient between two string: http://en.wikipedia.org/wiki/Jaro%E2%80%93Winkler_distance
+|SoundEx of String A|Encodes a string into a Soundex value.
+|RefinedSoundEx of String A|Retrieves the Refined Soundex code for a given string object
+|Date A + B Hours|Add B hours to Date field.
+Note: Only integer values for B are supported. If you need non-integer calculations, please add a second calculation with minutes.
+|Date A + B Minutes|Add B minutes to Date field.
+Note: Only integer values for B are supported. If you need non-integer calculations, please add a second calculation with seconds.
+|Date A - Date B (milliseconds)|Subtract B milliseconds from Date field A
+|Date A - Date B (seconds)|Subtract B seconds from Date field A.
+Note: Only integer values for B are supported. If you need non-integer calculations, please add a second calculation with milliseconds.
+|Date A - Date B (minutes)|Subtract B minutes from Date field A.
+Note: Only integer values for B are supported. If you need non-integer calculations, please add a second calculation with seconds.
+|Date A - Date B (hours)|Subtract B hours from Date field A.
+Note: Only integer values for B are supported. If you need non-integer calculations, please add a second calculation with minutes.
+|Hour of Day of Date A|Extract the hour part of the given date
+|Minute of Hour of Date A|Extract the minute part of the given date
+|Second of Hour of Date A|Extract the second part of a given date
+|===
+
+== FAQ on length and precision and data types affecting the results
+
+*Q*: I made a pipeline using A/B in a calculator transform and it rounded wrong: the 2 input fields are integer but my result type was Number(6, 4) so I would expect the integers to be cast to Number before executing the division.
+
+If I wanted to execute e.g. 28/222, I got 0.0 instead of 0.1261 which I expected. So it seems the result type is ignored. If I change the input types both to Number(6, 4) I get as result 0.12612612612612611 which still ignores the result type (4 places after the comma).
+
+Why is this?
+
+*A*: Length & Precision are just metadata pieces.
+
+If you want to round to the specified precision, you should do this in another transform. However: please keep in mind that rounding double point precision values is futile anyway. A floating point number is stored as an approximation (it floats) so 0.1261 (your desired output) could (would probably) end up being stored as 0.126099999999 or 0.1261000000001 (Note: this is not the case for BigNumbers)
+
+So in the end we round using BigDecimals once we store the numbers in the output table, but NOT during the pipeline. The same is true for the Text File Output transform. If you would have specified Integer as result type, the internal number format would have been retained, you would press "Get Fields" and it the required Integer type would be filled in. The required conversion would take place there and then.
+
+In short: we convert to the required metadata type when we land the data somewhere, NOT BEFORE.
+
+*Q*: How do the data types work internally?
+*A*: You might notice that if you multiply an Integer and Number, the result is always rounded.  That is because Calculator takes the data type of the left hand side of the multiplication (A) as the driver for the calculation.
+As such, if you want more precision, you should put field B on the left hand side or change the data type to Number and all will be well.
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/changefileencoding.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/changefileencoding.adoc
new file mode 100644
index 0000000..4608f40
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/changefileencoding.adoc
@@ -0,0 +1,38 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/changefileencoding/src/main/doc/changefileencoding.adoc
+= Change file encoding
+
+== Description
+
+This transform changes a text file from one encoding to another. 
+
+== Options
+
+
+* Transform name : the name of the transform, unique in a pipeline
+* Source file
+  ** Filename field : the name of the input field which will carry the file names at runtime
+  ** Source encoding: the encoding of the text files
+  ** Add source filename to the result filenames? : Check this if you want to add the source file(s) automatically to the files list of the result of the pipeline.
+* Target file
+  ** Target filename field: the name of the input field which will contain the target (output) file name at runtime
+  ** Create parent folder: checking this field automatically creates the output folder specified in the target filename field.
+  ** Target encoding: you can select the target encoding here
+  ** Add target filename to the result filenames? : Check this if you want to add the target file(s) automatically to the files list of the result of the pipeline.
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/checksum.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/checksum.adoc
new file mode 100644
index 0000000..e4adb87
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/checksum.adoc
@@ -0,0 +1,43 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/checksum/src/main/doc/checksum.adoc
+= Add a checksum
+
+== Description
+
+This transform calculates checksums for one or more fields in the input stream and adds this to the output as a new field.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. Note: This name has to be unique in a single pipeline.
+|Type a|The type of checksum that needs to be calculated.  These are the types that are available:
+
+* CRC32 : 32-bit Cyclic Redundancy Check : http://en.wikipedia.org/wiki/Cyclic_redundancy_check
+* ADLER-32 : Checksum algorithm by Mark Adler : http://en.wikipedia.org/wiki/Adler-32
+* MD5 : Message Digest algorithm 5 : http://en.wikipedia.org/wiki/MD5
+* SHA-1 : Secure Hash Algorithm 1 : http://en.wikipedia.org/wiki/SHA-1
+* SHA-256 : Secure Hash Algorithm 2 : http://en.wikipedia.org/wiki/SHA-256
+|Result Type|Some checksum types allow to set different result types: String, Hexadecimal and Binary
+|Result field|The name of the result field containing the checksum
+|Fields used in the checksum|The names of the fields to include in the checksum calculation.
+Note: You can use the "Get Fields" button to insert all input fields from previous transforms.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/clonerow.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/clonerow.adoc
new file mode 100644
index 0000000..a57e3b9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/clonerow.adoc
@@ -0,0 +1,38 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/clonerow/src/main/doc/clonerow.adoc
+= Clone row
+
+== Description
+
+This transform creates copies (clones) of a row and outputs them directly after the original row to the next transforms.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. Note: This name has to be unique in a single pipeline.
+|Nr clones|The number of clones you want to add after the original row.
+|Add clone flag a|Check this option if you want to add a boolean field in the output indicating if the row is a clone or not.
+
+* N / false : this is not a cloned row, it's the original row
+* Y / true : this is a cloned row, a copy of the original row
+|Clone flag field|The name of the clone flag field 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/closure.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/closure.adoc
new file mode 100644
index 0000000..b8a3c53
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/closure.adoc
@@ -0,0 +1,37 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/closure/src/main/doc/closure.adoc
+= Closure
+
+== Description
+
+This transform was created to allow you to generate a Reflexive Transitive Closure Table for Mondrian.  For more information on how a closure table can help Mondrian gain performance, refer to the Mondrian documentation.
+Technically, this transform reads all input rows in memory and calculates all possible parent-child relationships.  It attaches the distance (in levels) from parent to child.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|The name that uniquely identifies the transform.
+|Parent ID field|The field name that contains the parent ID of the parent-child relationship.
+|Child ID field|The field name that contains the child ID of the parent-child relationship.
+|Distance field name|The name of the distance field that will be added to the output
+|Root is zero|Check this box if the root of the parent-child tree is not empty (null) but zero (0) 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/coalesce.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/coalesce.adoc
new file mode 100644
index 0000000..4c15412
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/coalesce.adoc
@@ -0,0 +1,46 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/coalesce/src/main/doc/coalesce.adoc
+= Coalesce
+
+== Description
+
+Lets you combine multiple fields into one, selecting the first value that is non-null.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. *Note*: This name has to be unique in a single pipeline.
+|Considered empty string as null|The transform can consider empty string as null.
+|===
+
+== Fields
+|Fields|Specify the name, type, and value in the form of a string. Then, specify the formats to convert the value into the chosen data type.
+
+[width="90%", options="header"]
+|===
+|Name|The result field name, can overwrite an existing one.
+|Type|The formats to convert the value into the chosen data type.
+|Remove|Remove input fields from the stream.
+|Input fields|The order of the input fields listed in the columns determines the order in which they are evaluated.
+|===
+
+NOTE: Support MetaData Injection (MDI)
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/columnexists.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/columnexists.adoc
new file mode 100644
index 0000000..9b85fbf
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/columnexists.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/columnexists/src/main/doc/columnexists.adoc
+= Column exists
+
+== Description
+
+This transform allows you to verify the existence of a specific column in a database table.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; This name has to be unique in a single pipeline
+|Connection|The database connection to use
+|Schema name|(optional) The schema name of the table of the column to check
+|Table name|The name of the table of the column to check
+|Tablename in field?|Enable to read the name of the table in an input field
+|Tablename field|The name of the input field containing the table name
+|Columnname field|The name of the column field in the input stream
+|Result fieldname|The name of the resulting boolean flag field 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/combinationlookup.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/combinationlookup.adoc
new file mode 100644
index 0000000..d478380
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/combinationlookup.adoc
@@ -0,0 +1,66 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/combinationlookup/src/main/doc/combinationlookup.adoc
+= Combination lookup/update
+
+== Description
+
+The Combination Lookup-Update transform allows you to store information in a junk-dimension table, and can possibly also be used to maintain Kimball pure Type 1 dimensions.
+
+This transform will...
+
+1. Look up combination of business key field1... fieldn from the input stream in a dimension table
+2. If this combination of business key fields exists, return its technical key (surrogate id);
+3. If this combination of business key doesn't exist yet, insert a row with the new key fields and return its (new) technical key
+4. Put all input fields on the output stream including the returned technical key, but remove all business key fields if "remove lookup fields" is true.
+
+This transform creates/maintains a technical key out of data with business keys. After passing through this transform all of the remaining data changes for the dimension table can be made as updates, as either a row for the business key already existed or was created.
+
+This transform will maintain the key information only. You must update the non-key information in the dimension table, e.g. by putting an update transform (based on technical key) after the combination update/lookup transform.
+
+Apache Hop (Incubating) will store the information in a table where the primary key is the combination of the business key fields in the table. Because this process can be slow if you have a large number of fields, a "hash code" field is supported that is representing all fields in the dimension. This can speed up lookup performance dramatically while limiting the fields to index to one field only.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Connection|Name of the database connection on which the dimension table resides.
+|Target schema|This allows you to specify a schema name.
+|Target table|Name of the dimension table.
+|Commit size
+|Define the commit size, e.g. setting this to 10 will generate a commit every 10 inserts or updates.
+|Cache size in rows |This is the cache size in number of rows that will be held in memory to speed up lookups by reducing the number of round trips to the database.
+
+A cache size of 0 caches as many rows as possible and until your JVM runs out of memory. Use this option wisely with dimensions that can't grow too large.
+A cache size of -1 means that caching is disabled.
+|Key fields|Specify the names of the keys in the stream and in the dimension table. This will enable the transform to do the lookup.
+|Technical key field|This indicates the primary key of the dimension. It is also referred to as Surrogate Key.
+|Creation of technical key a|Specify how the technical key is generated, options which are not available for your connection will be grayed out:
+
+* Use table maximum + 1: A new technical key will be created from the maximum key in the table. Note that the new maximum is always cached, so that the maximum does not need to be calculated for each new row.
+* Use sequence: Specify the sequence name if you want to use a database sequence on the table connection to generate the technical key (typical for Oracle e.g.).
+* Use auto increment field: Use an auto increment field in the database table to generate the technical key (supported e.g. by DB2).
+|Remove lookup fields?|Enable this option if you want to remove all the lookup fields from the input stream in the output. The only extra field added is then the technical key.
+|Use hashcode|This option allows you to generate a hash code, representing all values in the key fields in a numerical form (a signed 64 bit integer). This hash code has to be stored in the table.
+|Date of last update field|When required, specify the date of last update field (timestamp) from the source system to be copied to the data warehouse. For example, when you have an address without a primary key. The field will not be part of the lookup fields (nor be part in the hash code calculation). The value is written once only because any change results in a new record being written.
+|Get Fields button|Fills in all the available fields on the input stream, except for the keys you specified.
+|SQL button|Generates the SQL to build the dimension and allows you to execute this SQL. 
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/concatfields.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/concatfields.adoc
new file mode 100644
index 0000000..2981745
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/concatfields.adoc
@@ -0,0 +1,89 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/concatfields/src/main/doc/concatfields.adoc
+= Concat Fields
+
+== Description
+
+The Concat Fields transform is used to concatenate multiple fields into one target field. The fields can be separated by a separator and the enclosure logic is completely compatible with the Text File Output transform.
+
+== Options
+
+=== General
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Target Field Name|The name of the target field (String type)
+|Length of Target Field|The length of the string type (setting the meta-data of the String type, this is also used by the Fast Data Dump option for performance optimization)
+|Separator|Specify the character that separates the fields in a single line of text. Typically this is ; or a tab.
+|Enclosure|A pair of strings can enclose some fields. This allows separator characters in fields. The enclosure string is optional. 
+|===
+
+=== Fields Tab
+
+This is identical to the fields tab option of the Text File Output transform and has the same functionality.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|The name of the field.
+|Type|Type of the field can be either String, Date or Number.
+|Format|The format mask to convert with. See Number Formats for a complete description of format symbols.
+|Length a|The length option depends on the field type as follows:
+
+- Number - Total number of significant figures in a number
+
+- String - total length of string
+
+- Date - length of printed output of the string (e.g. 4 only gives back year)
+
+|Precision a|The precision option depends on the field type as follows:
+
+- Number - Number of floating point digits
+- String - unused
+- Date - unused
+
+|Currency|Symbol used to represent currencies like $10,000.00 or E5.000,00
+|Decimal|A decimal point can be a "." (10,000.00) or "," (5.000,00)
+|Group|A grouping can be a "," (10,000.00) or "." (5.000,00)
+|Trim type|The trimming method to apply on the string. Trimming only works when there is no field length given.
+|Null|If the value of the field is null, insert this string into the textfile
+|Get|Click to retrieve the list of fields from the input fields stream(s)
+|Minimal width|Alter the options in the fields tab in such a way that the resulting width of lines in the text file is minimal. So instead of save 0000001, we write 1, etc. String fields will no longer be padded to their specified length. 
+|===
+
+=== Advanced Tab
+
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Remove selected fields|Check this to remove all selected fields from the output stream.
+|Force the enclosure around fields?|This option forces all field names to be enclosed with the character specified in the Enclosure property above.
+|Disable the enclosure fix?|This is for backward compatibility reasons related to enclosures and separators. When a string field contains an enclosure it gets enclosed and the enclose itself gets escaped. When a string field contains a separator, it gets enclosed. Check this option, if this logic is not wanted. It has also an extra performance burden since the strings are scanned for enclosures and separators. So when you are sure there is no such logic needed since your strings don't ha [...]
+|Header|Enable this option if you want a header row. (First line in the stream). Note: All other output stream fields are set to Null when this line is produced.
+|Footer|Enable this option if you want a footer row. (Last line in the stream). Note: All other output stream fields are set to Null when this line is produced.
+|Encoding|Specify the String encoding to use. Leave blank to use the default encoding on your system. To use Unicode specify UTF-8 or UTF-16. On first use, Spoon will search your system for available encodings. Note: This is needed especially when you concatenate different encoded fields into the target field with a unique encoding. This applies also on Binary stored string fields due to Lazy conversion.
+|Right pad fields|Add spaces to the end of the fields (or remove characters at the end) until they have the specified length.
+|Fast data dump (no formatting)|Improves the performance when concatenating large amounts of data by not including any formatting information. Please consider setting the "Length of Target Field" option to an approximately maximum of the target field length. This improves performance since the internal buffer will be allocated and needs no reallocation when it is not sufficient.
+Note: When the "Length of Target Field" option is "0", the internal buffer size is calculated as 50 times the number of concatenated fields, for instance an internal buffer of 250 is used by default for 5 fields.
+|Split every ... rows|If this number N is larger than zero, split the resulting stream into multiple parts of N rows. Note: This is only needed when a Header/Footer is used to be compatible with the result of the Text File Output transform.
+|Add Ending line of file|Allows you to specify an alternate ending row to the output stream. Note: All other output stream fields are set to Null when this line is produced. 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/constant.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/constant.adoc
new file mode 100644
index 0000000..95ad002
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/constant.adoc
@@ -0,0 +1,33 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/constant/src/main/doc/constant.adoc
+= Add Constants
+
+== Description
+
+The Add constant values transform is a simple and high performance way to add constant values to the stream.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. *Note*: This name has to be unique in a single pipeline.
+|Fields|Specify the name, type, and value in the form of a string. Then, specify the formats to convert the value into the chosen data type.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/creditcardvalidator.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/creditcardvalidator.adoc
new file mode 100644
index 0000000..a7d08d0
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/creditcardvalidator.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/creditcardvalidator/src/main/doc/creditcardvalidator.adoc
+= Credit card validator
+
+== Description
+
+The Credit card validator transform will help you check the following:
+
+* The validity of a credit card number. This uses a LUHN10 (MOD-10) algorithm.
+* The credit card vendor that handles the number: VISA, MasterCard, Diners Club, EnRoute, American Express (AMEX),...
+
+
+== Options
+
+
+* transform name: the transform name, unique in a pipeline
+* Credit card field: the name of the input field that will contain the credit card number during execution
+* Get only digits? : Enable this option if you want to strip all non-numeric characters from the (String) input field
+* Output Fields
+  ** Result fieldname: the name of the (Boolean) output field indicating the validity of the number
+  ** Credit card type field: the name of the output field that will hold the credit card type (vendor)
+  ** Not valid message: the name of the output field that will hold the error message.
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/csvinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/csvinput.adoc
new file mode 100644
index 0000000..1d4fdd4
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/csvinput.adoc
@@ -0,0 +1,60 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/textfile/src/main/doc/csvinput.adoc
+= CSV File Input
+
+== Description
+
+This transform provides the ability to read data from a delimited file.  The CSV label for this transform is a misnomer because you can define whatever separator you want to use, such as pipes, tabs, and semicolons; you are not constrained to using commas. Internal processing allows this transform to process data quickly. Options for this transform are a subset of the Text File Input transform.
+
+This transform has fewer overall options than the general Text File Input transform, but it has a few advantages over it:
+
+* NIO -- Native system calls for reading the file means faster performance, but it is limited to only local files currently. No VFS support.
+* Parallel running -- If you configure this transform to run in multiple copies or in clustered mode, and you enable parallel running, each copy will read a separate block of a single file allowing you to distribute the file reading to several threads or even several slave nodes in a clustered pipeline.
+* Lazy conversion -- If you will be reading many fields from the file and many of those fields will not be manipulated, but merely passed through the pipeline to land in some other text file or a database, lazy conversion can prevent Hop from performing unnecessary work on those fields such as converting them into objects such as strings, dates, or numbers.
+
+For information on valid date and numeric formats used in this transform, view the Date Formatting Table and Number Formatting Table.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Filename *or* the filename field (data from previous transforms)|Specify the name of the CSV file to read from. *or* Select the fieldname that will contain the filename(s) to read from. If this transform receives data from a previous transform, this option is enabled as well as the option to include the filename in the output.
+|Delimiter|Specify the file delimiter character used in the target file. Special characters (e.g. CHAR HEX01) can be set with the format $[value], e.g. $[01] or $[6F,FF,00,1F].
+|Enclosure|Specify the enclosure character used in the target file. Special characters (e.g. CHAR HEX01) can be set with the format $[value], e.g. $[01] or $[6F,FF,00,1F].
+|NIO buffer size|This is the size of the read buffer.  It represents the amount of bytes that is read in one time from disk.
+|Lazy conversion|The lazy conversion algorithm will try to avoid unnecessary data type conversions and can result in a significant performance improvements if this is possible.  The typical example that comes to mind is reading from a text file and writing back to a text file.
+|Header row present?|Enable this option if the target file contains a header row containing column names.
+|Add filename to result|Adds the CSV filename(s) read to the result of this pipeline.  A unique list is being kept in memory that can be used in the next workflow action in a workflow, for example in another pipeline.
+|The row number field name (optional)|The name of the Integer field that will contain the row number in the output of this transform.
+|Running in parallel?|Check this box if you will have multiple instances of this transform running (transform copies) and if you want each instance to read a separate part of the CSV file(s). When reading multiple files, the total size of all files is taken into consideration to split the workload. In that specific case, make sure that ALL transform copies receive all files that need to be read, otherwise, the parallel algorithm will not work correctly (for obvious reasons). WARNING: For [...]
+|File Encoding|Specify the encoding of the file being read.
+|Fields Table|This table contains an ordered list of fields to be read from the target file.
+|Preview button|Click to preview the data coming from the target file.
+|Get Fields button|Click to return a list of fields from the target file based on the current settings (i.e. Delimiter, Enclosure, etc.). All fields identified will be added to the Fields Table.
+|===
+
+== Metadata Injection Support
+
+You can use the Metadata Injection supported fields with ETL Metadata Injection transform to pass metadata to your pipeline at runtime. The following Option and Value fields of the CSV File Input transform support metadata injection:
+
+* *Options*: Filename, Delimiter, Enclosure, NIO Buffer Size, Lazy Conversion, Header Row Present?, Add Filename to Result, The Row Number Field Name, Running in Parallel?, and File Encoding
+* *Values*: Name, Length, Decimal, Type, Precision, Group, Format, Currency, and Trim Type
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/cubeinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/cubeinput.adoc
new file mode 100644
index 0000000..1c595df
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/cubeinput.adoc
@@ -0,0 +1,35 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/cubeinput/src/main/doc/cubeinput.adoc
+= Cube input
+
+== Description
+
+The De-serialize from file transform, formerly known as Cube Input, reads rows of data from a binary Hop file containing rows and metadata.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform Name|Name of the transform; this name has to be unique in a single pipeline.
+|Filename|The name of the Hop cube file to be read.
+|Limit size|Allows you to limit the number of rows read from the cube file. A value of zero (0) indicates no size limit (optional).
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/cubeoutput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/cubeoutput.adoc
new file mode 100644
index 0000000..60f9215
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/cubeoutput.adoc
@@ -0,0 +1,26 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/cubeoutput/src/main/doc/cubeoutput.adoc
+= Cube output
+
+== Description
+
+The Serialize to file transform, formerly known as Cube File Output, stores rows of data in a binary format in a file. The binary format has an advantage over a text (flat) file because the content does not have to be parsed when read back. The metadata is stored in the file as well.
+
+The Serialize to file transform supports a write-once access pattern, and does not support appending to existing files.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/databasejoin.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/databasejoin.adoc
new file mode 100644
index 0000000..4655033
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/databasejoin.adoc
@@ -0,0 +1,56 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/databasejoin/src/main/doc/databasejoin.adoc
+= Database Join
+
+== Description
+
+The Database Join transform allows you to run a query against a database using data obtained from previous transforms. The parameters for this query are specified as follows:
+
+The data grid in the transform properties dialog.  This allows you to select the data coming in from the source hop.
+As question marks (?) in the SQL query.  When the transform runs, these will be replaced with data coming in from the fields defined from the data grid.  The question marks will be replaced in the same order as defined in the data grid.
+For example, Database Join allows you to run queries looking up the oldest person who bought a specific product as shown below:
+
+[source,sql]
+----
+SELECT      customernr
+FROM        product_orders, customer
+WHERE       orders.customernr = customer.customernr
+AND         orders.productnr = ?
+ORDER BY    customer.date_of_birth
+----
+
+The grid is then defined as follows:
+
+image::databasejoin-grid.png[]
+
+When the transform runs, the (?) placeholder defined in the SQL query will be replaced with the incoming productnr field value from the source hop.  To define and use multiple parameters, list the fields in order you want them to be replaced in the SQL statement.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline
+|Connection|The database connection to use for the query.
+|SQL|SQL query to form the join; use question marks as parameter placeholders
+|Number of rows to return|Zero (0) returns all rows; any other number limits the number of rows returned.
+|Outer join?|Enable to always return a result, even if the query did not return a result
+|Parameters table|Specify the fields containing parameters.  The parameter type is required.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/databaselookup.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/databaselookup.adoc
new file mode 100644
index 0000000..5236632
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/databaselookup.adoc
@@ -0,0 +1,62 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/databaselookup/src/main/doc/databaselookup.adoc
+= Database Lookup
+
+== Description
+
+The Database lookup transform allows you to look up values in a database table. Lookup values are added as new fields onto the stream.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. This name has to be unique in a single pipeline.
+|Connection|Database connection to the lookup table.
+|Lookup schema|Database schema containing the lookup table.
+|Lookup Table|Name of the database table used for the lookup.
+|Enable cache?|Enables caching of database lookups. This means that once a key (or group of key) has been looked up, the looked up values are stored, and returned again the next time this key (or group of key) is being looked up (without incurring the cost of a database call).
+
+*Important*: If other processes are changing values in the table where you perform a lookup, do not cache values. In all other instances, caching values increases the performance substantially because database lookups are relatively slow. If you can't use the cache, consider launching several copies of the transform simultaneously. A simultaneous launch keeps the database busy through different connections. See Launching several copies of a transform.
+|Cache size in rows|The size of the cache (number of rows), 0 means cache everything.
+|Load all data from table|Pre-loads the cache with all the data present in the lookup table.  This may improve performance by avoiding database calls.
+However, if you have a large table, you risk running out of memory.
+
+*IMPORTANT*:  In memory look-ups can lead to different results because of the differences in the way your database compares data. For example, if your database table allows case-insensitive comparisons, you may get different results with this option. The same goes for data with trailing spaces. Those are ignored in certain databases when using certain character data types. See more details on the below note about this option.
+|Keys to look up table|The keys and conditions to perform the database lookup.
+
+*Tip*: When using the LIKE operator on the key lookup it's not automatically adding wildcards to the stream value.  So by default the LIKE operator is behaving as an "=" (see your database for SQL specific topics on the LIKE operator). It is possible to add the wildcards to add a 'Replace in String' transform before the database lookup.  In the replace transform select the lookup field then use Regex to look for (^.*) and replace with %$1%.  This will add the wildcards to the field for t [...]
+|Values to return table|The fields from the lookup table to add to the output stream.
+"New name" allows you to use a different name if the database column name is inappropriate.
+"Default" is the value returned (instead of null) if the lookup fails. Note that enabling error handling on the Database Lookup transform will redirect all failed lookup rows to the error handling transform, instead of adding them to the main output stream with null/default values.
+"Type" is the type of the output field.
+|Do not pass the row if the lookup fails|Enable to avoid passing a row when lookup fails. Within the SQL syntax, enabling this would be an INNER JOIN, otherwise it would be an OUTER JOIN.
+|Fail on multiple results?|Enable to force the transform to fail if the lookup returns multiple results.
+|Order by|If the lookup query returns multiple results, the ORDER BY clause helps you to select the record to take. For example, ORDER BY would allow you to pick the customer with the highest sales volume in a specified state.
+|Get Fields|Click to return a list of available fields from the input stream(s) of the transform.
+|Get lookup fields|Click to return a list of available fields from the lookup table that can be added to the transform's output stream.
+|===
+
+*Note on the Load all data from table option*: As an example: when storing a value of type "CHAR(3)", many databases will store "ab" using 3 characters: "ab " (notice the blank space). When you do a "SELECT * FROM my_lookup_table WHERE key_column = 'ab'", the database is smart and adds a blank space before applying the where clause (it looks for 'ab ' instead of 'ab').
+
+The problem occurs when you use "load all data from table" to pre-load the cache: at startup, the cache is built using all the values present in the database, so we store 'ab ' in the cache. Later, looking up 'ab' fails, because the trailing space matters in java equality.
+
+Note that this problem does not occur when using the cache, if "load all data from table" is disabled. Indeed, the first lookup for 'ab' would find no entry in the cache, call the database (which is smart enough to handle the trailing whitespace problem), get the correct result, and store it in the java cache under the 'ab' key. Thus, the next lookup for 'ab' will find the appropriate result in the cache :)
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/datagrid.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/datagrid.adoc
new file mode 100644
index 0000000..531b7d0
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/datagrid.adoc
@@ -0,0 +1,41 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/datagrid/src/main/doc/datagrid.adoc
+= Data Grid
+
+== Description
+
+The Data Grid transform allows you to enter a static list of rows in a grid.  This is usually done for testing, reference or demo purposes.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Meta tab|Specify the field metadata (output specification) of the data
+|Data Tab|contains the data.  Everything is entered in String format so make sure you use the correct format masks in the metadata tab.
+|===
+
+== Metadata Injection Support
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
+
+== Limitations
+It is not yet possible to insert columns in between 2 existing one or in general re-order the fields in the metadata tab.
+
+If you plan to do this, make sure to copy the data section first to a spreadsheet (Select all rows and copy/paste the whole grid), change the data and copy it back into the data section.
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dbproc.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dbproc.adoc
new file mode 100644
index 0000000..9ebd8c4
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dbproc.adoc
@@ -0,0 +1,45 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/dbproc/src/main/doc/dbproc.adoc
+= Call DB procedure
+
+== Description
+
+The Call DB Procedure transform allows you to execute a database procedure (or function) and get the result(s) back. With MySQL and JDBC, it is not possible to retrieve the result set of a stored procedure. Stored procedures and functions can only return values through their function arguments and those arguments must be defined in the Parameters section of the DB Procedure Call configuration.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline
+|Connection|Name of the database connection on which the procedure resides
+|Proc-name|Name of the procedure or function to call
+|Find it|Click to search on the specified database connection for available procedures and functions (Oracle and SQL Server only)
+|Enable auto commit|In some instances you want to perform updates in the database using the specified procedure. In that case you can either have the changes performed using auto-commit or not. If auto-commit is disabled, a single commit is performed after the last row is received by this transform.
+|Result name|Name of the result of the function call; leave blank if this is a procedure
+|Result type|Type of the result of the function call; not used in case of a procedure.
+|Parameters a|List of parameters that the procedure or function needs
+
+* Field name: Name of the field.
+* Direction: Can be either IN (input only), OUT (output only), INOUT (value is changed on the database).
+* Type: Used for output parameters so that Hop knows the data type that is returned
+|Get Fields|Fills in all the fields in the input streams to make your life easier; delete the lines you don't need and re-order the remaining lines 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/delay.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/delay.adoc
new file mode 100644
index 0000000..0a30628
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/delay.adoc
@@ -0,0 +1,35 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/delay/src/main/doc/delay.adoc
+= Delay row
+
+== Description
+
+For each input row, the "Delay row" transform will wait the indicated timeout period before giving the row to the next transform.
+
+Use this transform if you deliberately want to slow down your pipeline.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|Name of the transform. Note: This name has to be unique in a single pipeline.
+|Timeout|The timeout value in seconds, minutes or hours 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/delete.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/delete.adoc
new file mode 100644
index 0000000..435dd17
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/delete.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/delete/src/main/doc/delete.adoc
+= Delete
+
+== Description
+
+The delete transform deletes rows of data from a database. This transform is similar to the update family of transforms in that it takes one or more key fields to determine the rows to delete.
+
+image::delete.png[]
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. This name has to be unique in a single pipeline.
+|Connection|The database connection to which data is written
+|Target schema|The name of the Schema for the table to which data is written. This is important for data sources that allow for table names with periods in them.
+|Target table|Name of the table in which you want to do the insert or update.
+|Commit size|The number of rows to change (insert / update) before running a commit.
+|The keys(s) to look up the value(s)|Specify fields to delete the corresponding rows for
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/denormaliser.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/denormaliser.adoc
new file mode 100644
index 0000000..dc767a7
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/denormaliser.adoc
@@ -0,0 +1,57 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/denormaliser/src/main/doc/denormaliser.adoc
+= Row Denormaliser
+
+== Description
+
+The De-normalizer transform allows you to de-normalize data by looking up key-value pairs. It also allows you to immediately convert data types.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Transform name|Name of the transform. This name has to be unique in a single pipeline.
+|Key field|The field that defines the key of the output row.
+|Group fields|Specify the fields that make up the grouping here.
+|Target fields|Select the fields to de-normalize by specifying the String value for the key field (see above).
+Options are provided to convert data types.
+Strings are most common as key-value pairs so you must often convert to Integer, Number or Date.
+If you get key-value pair collisions (key is not unique for the group specified) specify the aggregation method to use.
+|===
+
+== Metadata Injection Support
+You can use the Metadata Injection supported fields with ETL Metadata Injection transform to pass metadata to your pipeline at runtime. The following Value fields of the Row Denormaliser transform support metadata injection:
+
+* Target Filename, Value Fieldname, Key Value, Type, Format, Length, Precision, Currency, Decimal, Group, and Aggregation
+
+== Example
+
+[width="90%", options="header"]
+|===
+|CustomerId|Key|Value
+|101|COUSINS_FIRST_NAME|Mary
+|101|COUSINS_SECOND_NAME|J.
+|101|COUSINS_LAST_NAME|Blige
+|101|COUSINS_BIRTH_DATE|1969/02/14
+|101|COUSINS_INCOME|1723.86
+|101|...45 more keys...|...
+|===
+
+image::denormaliser.png[]
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/detectemptystream.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/detectemptystream.adoc
new file mode 100644
index 0000000..b268841
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/detectemptystream.adoc
@@ -0,0 +1,35 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/detectemptystream/src/main/doc/detectemptystream.adoc
+= Detect Empty Stream
+
+== Description
+
+This transform will output one row if the input stream is empty (i.e. when the input stream does not contain any row). The output row will have the same fields as the input row, but all field values will be empty (null).
+
+If the input stream is not empty it will not output anything.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform. This name must be unique throughout the pipeline.
+|===
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/detectlastrow.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/detectlastrow.adoc
new file mode 100644
index 0000000..0e5cd62
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/detectlastrow.adoc
@@ -0,0 +1,33 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/detectlastrow/src/main/doc/detectlastrow.adoc
+= Identify last row in a stream
+
+== Description
+
+The Identify last row in a stream pipeline transform generates a Boolean field filled with true for the last row, and false otherwise.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of this transform as it appears in the pipeline workspace
+|Result fieldname|Defines the field to use to mark the last row of a stream. It generates a Boolean field filled with true for the last row, and false otherwise.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dimensionlookup.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dimensionlookup.adoc
new file mode 100644
index 0000000..eea10f5
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dimensionlookup.adoc
@@ -0,0 +1,128 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/dimensionlookup/src/main/doc/dimensionlookup.adoc
+= Dimension lookup/update
+
+== Description
+
+The Dimension Lookup/Update transform allows you to implement Ralph Kimball's slowly changing dimension for both types: Type I (update) and Type II (insert) together with some additional functions.
+Not only can you use this transform to update a dimension table, it may also be used to look up values in a dimension. 
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Technical key|This is the primary key of the dimension.
+|Version field|Shows the version of the dimension entry (a revision number).
+|Start of date range|This is the field name containing the validity starting date.
+|End of date range|This is the field name containing the validity ending date.
+|Keys|These are the keys used in your source systems. For example: customer numbers, product id, etc.
+|Fields|These fields contain the actual information of a dimension. 
+|===
+
+As a result of the lookup or update operation of this transform type, a field is added to the stream containing the technical key of the dimension. In case the field is not found, the value of the dimension entry for not found (0 or 1, based on the type of database) is returned.
+
+A number of optional fields (in the "Fields" tab) are automatically managed by the transform. You can specify the table field name in the "Dimension Field" column.  These are the optional fields:
+
+* Date of last insert or update (without stream field as source) : adds and manages a Date field
+* Date of last insert (without stream field as source) : adds and manages a Date field
+* Date of last update (without stream field as source) : adds and manages a Date field
+* Last version (without stream field as source) : adds and manages a Boolean field. (converted into Char(1) or boolean database data type depending on your database connection settings and availability of such data type)
+* This acts as a current valid dimension entry indicator for the last version: So when a type II attribute changes and a new version is created (to keep track of the history) the 'Last version' attribute in the previous version is set to 'False/N' and the new record with the latest version is set to 'True/Y'.
+
+== Functionality
+
+As the name of the transform suggests, the functionality of the transform falls into 2 categories, Lookup and Update...
+
+== Lookup
+
+In read-only mode (update option is disabled), the transform only performs lookups in a slowly changing dimension.  The transform will perform a lookup in the dimension table on the specified database connection and in the specified schema. To do the lookup it uses not only the specified natural keys (with an "equals" condition) but also the specified "Stream datefield" (see below).  The condition that is applied is:
+====
+"Start of table date range" <= "Stream datefield"  AND "End of table date range" > "Stream datefield"
+
+====
+
+When no "Stream datefield" is specified we use the current system date to find the correct dimension version record.
+
+When no row is found, the "unknown" key is returned. (The "unknown" key will be 0 or 1 depending on whether or not you selected an auto-increment field for the technical key field).  Please note that we don't make a difference between "Unknown", "Not found", "Empty", "Illegal format", etc.  These nuances can be added manually however.  Nothing prevents you from flushing out these types before the data hits this transform with a Filter, regular expression, etc.  We suggest you manually ad [...]
+
+* Do not use NULL values for your natural key(s).  Null values can't be compared and are not indexed by most databases. Even if we would support null values in keys (something that doesn't make a lot of sense anyway), it would most likely cause severe lookup performance problems.
+* Be aware of data conversion issues that occur if you have data types in your input streams that are different from the data types in your natural key(s).  If you have Strings in the transforms input and in the database you use an Integer for example, make sure you are capable of converting the String to number.  See it as a best practice to do this before this transform to make sure it works as planned.  Another typical example of problems is with floating point number comparisons. [...]
+
+== Update
+
+In update mode (update option is enabled) the transform first performs a lookup of the dimension entry as described in the "Lookup" section above. The result of the lookup is different though. Not only the technical key is retrieved from the query, but also the dimension attribute fields. A field-by-field comparison then follows. The result can be one of the following situations:
+
+* The record was not found, we insert a new record in the table. 
+* The record was found and any of the following is true:
+  ** One or more attributes were different and had an "Insert" (Kimball Type II) setting: A new dimension record version is inserted.
+  ** One or more attributes were different and had a "Punch through" (Kimball Type I) setting: These attributes in all the dimension record versions are updated.
+  ** One or more attributes were different and had an "Update" setting: These attributes in the last dimension record version are updated.
+  ** All the attributes (fields) were identical: No updates or insertions are performed.
+* Insertion of new rows is performed in the following transforms:
+  ** The current row is updated with "date_to" updated with the "Stream date field" 
+  ** The new row is inserted where the changes in attributes are recorded according to rule in previous paragraph.  "date_from" field is updated with the "Stream date field" and the "date_to" is updated with the Max date of the table range end date.
+  ** The version number of the new row is incremented by 1.
+  ** "Stream date field" cannot be before the earliest start date of the currently valid rows.
+  ** select min(date_from) from dim_table where date_to = "2199-12-31 23:59:59.999"
+  ** It is important to ensure that the incoming rows are sorted by the "Stream date field" 
+
+
+== Options
+
+|===
+|Option|Description
+|transform name|Name of the transform.
+|Update the dimension?|Enable to update the dimension based on the information in the input stream; if not enabled, the dimension only performs lookups and adds the technical key field to the streams.
+|Connection|Name of the database connection on which the dimension table resides.
+|Target schema|This allows you to specify a schema name.
+|Target table|Name of the dimension table.
+|Commit size|Define the commit size, e.g. setting commit size to 10 generates a commit every 10 inserts or updates.
+|Caching a|
+* Enable the cache?  Enable this option if you want to enable data caching in this transform; set a cache size of >=0 in previous versions or -1 to disable caching.
+* Pre-load cache? You can enhance performance by reading the complete contents of a dimension table prior to performing lookups. Performance is increased by the elimination of the round trips to the database and by the sorted list lookup algorithm.
+* Cache size in rows: The cache size in number of rows that will be held in memory to speed up lookups by reducing the number of round trips to the database.
+|Keys tab|Specify the names of the keys in the stream and in the dimension table. This will enable the transform to perform the lookup.
+|Fields tab|For each of the fields you must have in the dimension, you can specify whether you want the values to be updated (for all versions, this is a Type I operation) or you want to have the values inserted into the dimension as a new version. In the example we used in the screenshot the birth date is something that's not variable in time, so if the birth date changes, it means that it was wrong in previous versions. It's only logical then, that the previous values are corrected in  [...]
+|Technical key field|The primary key of the dimension; also referred to as Surrogate Key. Use the new name option to rename the technical key after a lookup. For example, if you need to lookup different types of products like ORIGINAL_PRODUCT_TK, REPLACEMENT_PRODUCT_TK, ... 
+|Creation of technical key a|Indicates how the technical key is generated, options that are not available for your connection type will be grayed out:
+
+* Use table maximum + 1: A new technical key will be created from the maximum key in the table. Note that the new maximum is always cached, so that the maximum does not need to be calculated for each new row.
+* Use sequence: Specify the sequence name if you want to use a database sequence on the table connection to generate the technical key (typical for Oracle e.g.).
+* Use auto increment field: Use an auto increment field in the database table to generate the technical key (supported e.g. by DB2).
+|Version field|The name of the field in which to store the version (revision number).
+|Stream Datefield|If you have the date at which the dimension entry was last changed, you can specify the name of that field here. It allows the dimension entry to be accurately described for what the date range concerns. If you don't have such a date, the system date will be taken.
+When the dimension entries are looked up (Update the dimension is not selected) the date field entered into the stream datefield is used to select the appropriate dimension version based on the date from and date to dates in the dimension record.
+|Date range start field|Specify the names of the dimension entries start range.
+|Use an alternative start date? a|When enabled, you can choose an alternative to the "Min. Year"/01/01 00:00:00 date that is used.  You can use any of the following:
+
+* System date: Use the system date as a variable date/time
+* Start date of pipeline: Use the system date, taken at start of the pipeline for the start date
+* Empty (null) value
+* Column value: Select a column from which to take the value.
+
+|Table date range end|The names of the dimension entries end range
+|Get Fields button|Fills in all the available fields on the input stream, except for the keys you specified.
+|SQL button|Generates the SQL to build the dimension and allows you to execute this SQL. 
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dummy.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dummy.adoc
new file mode 100644
index 0000000..e07b594
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dummy.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/dummy/src/main/doc/dummy.adoc
+= Dummy (do nothing)
+
+== Description
+
+This transform type does not do anything. It functions as a placeholder for testing purposes or a way to combine multiple streams with the same structure.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dynamicsqlrow.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dynamicsqlrow.adoc
new file mode 100644
index 0000000..921e22e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/dynamicsqlrow.adoc
@@ -0,0 +1,40 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/dynamicsqlrow/src/main/doc/dynamicsqlrow.adoc
+= Dynamic SQL row
+
+== Description
+
+The Dynamic SQL row transform allows you to execute a SQL statement that is defined in a database field. The lookup values are added as new fields onto the stream.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; This name has to be unique in a single pipeline
+|Connection|Select a database connection for the lookup
+|SQL field name|Specify the field that contains the SQL to execute
+|Number of rows to return|Specify the number of records to return. 0 means, return all rows
+|Outer Join|- false: don't return rows where nothing is found  - true: at least return one source row, the rest is NULL
+|Replace variables|In case you want to use variables in the SQL, e.g. ${table_name}, this option needs to be checked.
+|Query only on parameters change|If your SQL statements do not change a lot, check this option to reduce the number of physical database queries.
+|Template SQL|In Hop meta and data are separate so you have to specify the meta part in template SQL (field name and type).
+This can be any statement that returns the same row structure.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/edi2xml.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/edi2xml.adoc
new file mode 100644
index 0000000..ae64a3d
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/edi2xml.adoc
@@ -0,0 +1,51 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/edi2xml/src/main/doc/edi2xml.adoc
+= Edi to XML
+
+== Description
+
+The Edi to Xml transform converts Edi message text (conforming to the ISO 9735 standard) to generic XML. The XML text is more accessible and allows for selective data extraction using XPath and the Get Data From XML transform. 
+
+== Options
+
+The transform configuration requires the field name containing the EDI text, and an output field name for the XML text. If the output field name is left empty, the EDI text is going to be replaced by the XML text.
+
+The structure of the XML output follows the following pattern:
+
+[source,xml]
+----
+<edifact>
+	<SEGMENT>
+		<element>
+			<value></value>
+			...
+		</element>
+		...
+	</SEGMENT>
+	...
+</edifact>
+----
+
+The conversion rules are:
+
+* the root of the document is the "edifact" tag
+* each segment in the edifact message is converted to a tag, using the segment name as the tag name.
+* each field within a segment is represented by an "element" tag
+* each value within a field is represented by a "value" tag
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/enhancedjsonoutput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/enhancedjsonoutput.adoc
new file mode 100644
index 0000000..f9d47ae
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/enhancedjsonoutput.adoc
@@ -0,0 +1,182 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/jsonoutput/src/main/doc/enhancedjsonoutput.adoc
+= Enhanced JSON Output
+
+== Description
+The Enhanced JSON Output transform allows you to generate JSON blocks based on input transform values. The output JSON will be available as a JavaScript array or JavaScript object, depending on the transform settings.
+
+This transform loops over the fields defined as Group Key and serializes JSON output accordingly. Because of this it is extremely important that the input transform data will be sorted by the group key to prevent generation errors.
+
+
+== Options
+
+=== General Tab
+
+The General tab allows you to specify the type of transform operation, the output JSON structure, and the transform output file. This file will be used to dump all generated JSON.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Operation a|Specify the transform operation type. Three types of operation are currently available:
+
+1. Output value - only pass output json as a transform output field, do not dump to output file
+2. Write to file - only write to file, do not pass to output field
+3. Output value and write to file - dump to file and pass generated json as a transform output field
+
+|Json block name|This value will be used as a name for json block. Can be empty string that will affect output json structure, see detailed description below.
+|Output value|This value will be used as a transform output field. Will contain generated json output block depending on transform settings.
+|Force Arrays In JSON| If checked, JSON output will be forced as a single item array.
+|Pritty Print JSON|If checked, the JSON output will be pretty printed.
+|===
+
+
+=== Output File
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Filename|full path to output file
+|Append|If not checked, a new file is created every time the transform runs; if a file with the specified name already exists, it is replaced by a new one. If checked, the new JSON output is appended at the end of the existing file; if the file does not exist yet, it is created as in the previous case.
+|Split JSON after n rows|If this number N is larger than zero, split the resulting JSON file into multiple parts of N rows.
+|Create Parent folder|Usually the file name contains a path with a parent folder. If the parent folder does not exist and this option is checked, the parent folder is created. Otherwise the file will not be found and the transform will fail.
+|Do not open create at start|If not checked - file (and in some cases parent folder) will be created/opened to write during pipeline initialization. If checked - file and parent folder will be created only after transform will get any first input data.
+|Extension|Output file extension. Default value is 'js'
+|Encoding|Output file encoding
+|Include date in filename?|If checked, the output file name will contain the file name value + the current date. This may help to generate unique output files.
+|Include time in filename|If checked, the output file name will contain the file creation time. Same as for the 'Include date in filename' option
+|Show filename(s) button|Can be useful to test full output file path
+|Add file to result filenames?|If checked - created output file path will be accessible form transform result
+|===
+
+=== Group Key Tab
+
+This tab is used to define the key fields from the input transform that are used to properly group and generate the output JSON values
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Fieldname|Input transform field name that will contribute to define the input transform fields key. Use 'Get Fields' button to discover available input fields
+|Element name|Json element name. For example "A":"B" - A is a element name, B is actual input value mapped for this Element name.
+|===
+
+=== Fields Tab
+
+This tab is used to map input transform fields to output json values
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Fieldname|Input transform field name. Use 'Get Fields' button to discover available input fields
+|Element name|Json element name as a key. For example "A":"B" - A is a element name, B is actual input value mapped for this Element name.
+|JSON Fragment|If the value is set to Y the value contained in the field is a JSON chunk and will be treated accordingly
+|Remove if Blank|If the value is set to Y and value in incoming field is null the related attribute will be omitted from JSON output
+
+|===
+
+== A Quick Example
+As an example suppose we have, as input, a flow with the following fields and values
+
+|===
+|Field1|Field2|Field3
+|A|B|1
+|A|B|2
+|B|C|1
+|B|C|2
+|B|D|4
+|C|F|5
+|C|F|6
+|C|V|6
+|C|B|7
+|===
+
+=== Tab General - Configuration
+|===
+|Option|Assigned Value
+|Operation|Write To File
+|JSON Block Name|result
+|Output Value|lvl1Detail
+|Pritty Print JSON|Checked
+|Filename|Set to a convenient filename for JSON output
+|===
+
+=== Tab Group Key - Configuration
+|===
+|Field Name|Element Name
+|field1|recordKey
+|===
+
+=== Fields Group Key - Configuration
+|===
+|Field Name|Element Name|JSON Fragment|Remove If Blank
+|field3||N|N
+|field3||N|N
+|===
+
+=== Output file result
+
+[source, json]
+----
+{
+  "result" : [ {
+    "key2" : "C",
+    "lvl1Details" : {
+      "result" : [ {
+        "campo2" : "F",
+        "campo3" : "5"
+      }, {
+        "campo2" : "F",
+        "campo3" : "6"
+      }, {
+        "campo2" : "V",
+        "campo3" : "6"
+      }, {
+        "campo2" : "B",
+        "campo3" : "7"
+      } ]
+    }
+  }, {
+    "key2" : "B",
+    "lvl1Details" : {
+      "result" : [ {
+        "campo2" : "C",
+        "campo3" : "1"
+      }, {
+        "campo2" : "C",
+        "campo3" : "2"
+      }, {
+        "campo2" : "D",
+        "campo3" : "4"
+      } ]
+    }
+  }, {
+    "key2" : "A",
+    "lvl1Details" : {
+      "result" : [ {
+        "campo2" : "B",
+        "campo3" : "1"
+      }, {
+        "campo2" : "B",
+        "campo3" : "2"
+      } ]
+    }
+  } ]
+}
+----
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/excelinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/excelinput.adoc
new file mode 100644
index 0000000..147a1a7
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/excelinput.adoc
@@ -0,0 +1,117 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/excelinput/src/main/doc/excelinput.adoc
+= Microsoft Excel input
+
+== Description
+
+The Microsoft Excel Input transform provides you with the ability to read data from Microsoft Excel. The following sections describe each of the available features for configuring this transform.
+
+The default spreadsheet type (engine) is set to Excel XLSX, XLS. When you are reading other file types like OpenOffice ODS and using special functions like protected worksheets, you need to change the Spread sheet type (engine) in the Content tab accordingly.
+
+== Options
+
+=== Files Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform Name|Name of the transform; the name has to be unique in a single pipeline.
+|Spread sheet type (engine) a|This field allows you to specify the spreadsheet type. Currently the following are supported:
+
+* Excel XLSX, XLS: This is the default, if you select this spread sheet type you can read all known Excel file types.  Functionality provided by the Apache POI project.
+* Excel XLSX (Streaming): This spread sheet type allows you to read in large Excel files.
+* Open Office ODS: By selecting this type you can read OpenOffice spreadsheet using the ODFDOM engine.
+|File or directory|Specifies the location and/or name of the input text file. Note: Click Add to add the file/directory/wildcard combination to the list of selected files (grid) below.
+|Regular expression|Specify the regular expression you want to use to select the files in the directory specified in the previous option. For example, you want to process all files that have a .txt extension. (For further details see the Text File Input transform and the section "Selecting file using Regular Expressions")
+|Exclude Regular Expression|Excludes all files (in a given location) that meet the criteria specified by this regular expression.
+|Selected Files|Contains a list of selected files (or wildcard selections) and a property specifying if file is required or not. If a file is required and it is not found, an error is generated;otherwise, the file name is skipped.
+|Accept filenames from previous transforms|Allows you to read in file names from a previous transform in the pipeline. You must also specify which transform you are importing from, and the input field in that transform from which you will retrieve the filename data
+|Show filenames(s)...|Displays a list of all files that will be loaded based on the current selected file definitions
+|Preview rows|Click Preview to examine the contents of the specified Excel file
+|===
+
+=== Sheets 
+
+In this tab you can specify the names of the sheets in the Excel workbook to read.  For each of the sheet names you can specify the row and column to start at.
+
+=== Content
+
+|===
+|Option|Description
+|Header|Enable if the sheets specified contain a header row to skip
+|No empty rows|Enable if you don't want empty rows in the output of this transform
+|Stop on empty row|Makes the transform stop reading the current sheet of a file when an empty line is encountered
+|Limit|Limits the number of rows to this number (zero (0) means all rows).
+|Encoding|Specifies the text file encoding to use. Leave blank to use the default encoding on your system. To use Unicode, specify UTF-8 or UTF-16. On first use, Hop searches your system for available encodings.
+|===
+
+=== Error handling
+
+|===
+|Option|Description
+|Strict types?|If checked, Hop will report data type errors in the input.
+|Ignore errors?|Enable if you want to ignore errors during parsing
+|Skip error lines?|If checked, Hop will skip lines that contain errors. These lines can be dumped to a separate file by specifying a path in the Failing line numbers files directory field below. If this is not checked, lines with errors will appear as NULL values in the output.
+|Warnings file directory|When warnings are generated, they are placed in this directory. The name of that file is <warning dir>/filename.<date_time>.<warning extension>
+|Error files directory|When errors occur, they are placed in this directory. The name of that file is <errorfile_dir>/filename.<date_time>.<errorfile_extension>
+|Failing line numbers files directory|When a parsing error occurs on a line, the line number is placed in this directory. The name of that file is <errorline dir>/filename.<date_time>.<errorline extension>
+|===
+
+
+=== Fields tab
+
+The fields tab is for specifying the fields that must be read from the Excel files. Use Get fields from header row to fill in the available fields if the sheets have a header row automatically.
+
+The Type column performs type conversions for a given field. For example, if you want to read a date and you have a String value in the Excel file, specify the conversion mask. Note: In the case of Number to Date conversion (for example, 20051028--> October 28th, 2005) specify the conversion mask yyyyMMdd because there will be an implicit Number to String conversion taking place before doing the String to Date conversion.
+
+|===
+|Option|Description
+|Name|The name of the field.
+|Type|The field's data type; String, Date or Number.
+|Length|The length option depends on the field type. Number: total number of significant figures in a number; String: total length of a string; Date: determines how much of the date string is printed or recorded.
+|Precision|The precision option depends on the field type, but only Number is supported; it returns the number of floating point digits.
+|Trim type|Truncates the field (left, right, both) before processing. Useful for fields that have no static length.
+|Repeat|If set to Y, will repeat this value if the field in the next row is empty.
+|Format|The format mask (number type). See Text File Input transform and the section "Number Formats" for a complete description of format symbols.
+|Currency|Symbol used to represent currencies.
+|Decimal|A decimal point; this is either a dot or a comma.
+|Grouping|A method of separating units of thousands in numbers of four digits or larger. This is either a dot or a comma.
+|===
+
+
+=== Additional output fields tab
+
+This tab retrieves custom metadata fields to add to the transform's output. The purpose of each field is defined in its name, but you can use these fields for whatever you want. Each item defines an output field that will contain the following information. Some of these are missing.
+
+|===
+|Option|Description
+|Full filename field|The full file name plus the extension.
+|Sheetname field|The worksheet name you're using.
+|Sheet row nr field|The current sheet row number.
+|Row nr written field|Number of rows written
+|Short filename field|The field name that contains the filename without path information but with an extension.
+|Extension field|The field name that contains the extension of the filename.
+|Path field|The field name that contains the path in operating system format.
+|Size field|The field name that contains the size of the file, in bytes.
+|Is hidden field|The field name that contains if the file is hidden or not (boolean).
+|Uri field|The field name that contains the URI.
+|Root uri field|The field name that contains only the root part of the URI.
+|===
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/excelwriter.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/excelwriter.adoc
new file mode 100644
index 0000000..3c1356e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/excelwriter.adoc
@@ -0,0 +1,108 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/excelwriter/src/main/doc/excelwriter.adoc
+= Excel writer
+
+== Description
+
+The Microsoft Excel Writer transform writes incoming rows from Hop out to an MS Excel file and supports both the .xls and .xlsx file formats. The .xls files use a binary format which is better suited for simple content, while the .xlsx files use the Open XML format which works well with templates since it can better preserve charts and miscellaneous objects.
+
+== Options
+
+=== File & sheet tab
+
+*File section*
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Stream XLSX data|Check this option when writing large XLSX files. It uses internally a streaming API and is able to write large files without any memory restrictions (of course not exceeding Excel's limit of 1,048,575 rows and 16,384 columns). Note: This option is available since version 4.4.0.
+|If output file exists|Specifies the behavior when the output file already exists.
+|Add filename(s) to result|Check to have the filename added to the result filenames
+|Wait for first row before creating file|Checking this option makes the transform create the file only after it has seen a row. If this is disabled the output file is always created, regardless of whether rows are actually written to the file.
+|===
+
+*Sheet section*
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Sheet Name|The sheet name the transform will write rows to.
+|Make this the active sheet|If checked the Excel file will by default open on the above sheet when opened in MS Excel.
+|If sheet exists in output file|The output file already has this sheet (for example when using a template, or writing to existing files), you can choose to write to the existing sheet, or replace it.
+|Protect Sheet|The XLS file format allows to protect an entire sheet from changes. If checked you need to provide a password. Excel will indicate that the sheet was protected by the user you provide here.
+|===
+
+*Template section*
+
+When creating new files (when existing files are replaced, or completely fresh files are created) you may choose to create a copy of an existing template file instead. Please make sure that the template file is of the same type as the output file (both must be xls or xlsx respectively).
+
+When creating new sheets, the transform may copy a sheet from the current document (the template or an otherwise existing file the transform is writing to). A new sheet is created if the target sheet is not present, or the existing one shall be replaced as per configuration above.
+
+=== Content tab
+
+*Content options section*
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Start writing at cell|This is the cell to start writing to in Excel notation (letter column, number row)
+|When writing rows|The transform may overwrite existing cells (fast), or shift existing cells down (append new rows at the top of sheet)
+|Write Header|If checked the first line written will contain the field names
+|Write Footer|If checked the last line written will contain the field names
+|Auto Size Columns|If checked the transform tries to automatically size the columns to fit their content. Since this is not a feature the xls(x) file formats support directly, results may vary.
+|Force formula recalculation a|If checked, the transform tries to make sure all formula fields in the output file are updated.
+
+* The xls file format supports a "dirty" flag that the transform sets. The formulas are recalculated as soon as the file is opened in MS Excel.
+* For the xlsx file format, the transform must try to recalculate the formula fields itself. Since the underlying POI library does not support the full set of Excel formulas yet, this may give errors. The transform will throw errors if it cannot recalculate the formulas.
+|Leave styles of existing cells unchanged|If checked, the transform will not try to set the style of existing cells it is writing to. This is useful when writing to pre-styled template sheets.
+|===
+
+*When writing to existing sheet section*
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Start writing at end of sheet|The transform will try to find the last line of the sheet, and start writing from there.
+|Offset by ... rows|Any non-0 number will cause the transform to move this amount of rows down (positive numbers) or up (negative numbers) before writing rows. Negative numbers may be useful if you need to append to a sheet, but still preserve a pre-styled footer.
+|Begin by writing ... empty lines|Any non-0 number will cause the transform to write this amount of empty lines before the actual data rows.
+|Omit Header|If checked, the header is not written when appending to an existing sheet. This is useful when the sheet already contains a header.
+|===
+
+*Fields section*
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|The field to write
+|Type|The type of data
+|Format|The Excel format to use in the sheet. Please consult the Excel manual for valid formats. There are some online references as well.
+|Style from cell|A cell (i.e. A1, B3 etc.) to copy the styling from for this column (usually some pre-styled cell in a template)
+|Field Title|If set, this is used for the Header/Footer instead of the Hop field name
+|Header/Footer style from cell|A cell to copy the styling from for headers/footers (usually some pre-styled cell in a template)
+|Field Contains Formula|Set to Yes, if the field contains an Excel formula (no leading '=')
+|Hyperlink|A field, that contains the target to link to. The supported targets are Link to other cells, http, ftp, email, and local documents
+|Cell Comment / Cell Author|The xlsx format allows to put comments on cells. If you'd like to generate comments, you may specify fields holding the comment and author for a given column.
+|===
+
+== Metadata Injection Support
+
+You can use the Metadata Injection supported fields with ETL Metadata Injection transform to pass metadata to your pipeline at runtime. The following Value fields of the Microsoft Excel Writer transform support metadata injection:
+
+* Name, Type, Format, Style from Cell, Field Title, Header/Footer Style from Cell, Field Contains Formula, Hyperlink, Cell Comment (XLSX), and Cell Comment Author (XLSX).
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/execprocess.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/execprocess.adoc
new file mode 100644
index 0000000..620eabe
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/execprocess.adoc
@@ -0,0 +1,42 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/execprocess/src/main/doc/execprocess.adoc
+= Execute a process
+
+== Description
+
+You can use the Execute a process transform to execute a shell script on the host where a workflow will run.
+
+* The transform is similar to the workflow action Shell, but can be used in a pipeline to execute for every row.
+
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline
+|Process field|The field name in the data stream that defines the process to start (shell script / batch file to start). Arguments can also be used.
+|Fail if not success|Checking this option means if the exit status is different than zero the transform fails. You can use error handling to get these rows.
+|Output line delimiter|Without defining a line delimiter, all returned lines are collapsed into a single string with no line delimiters.
+This option is available since 5.0 and by default empty for backward compatibility. You can set to any line delimiter and special characters can be set with the format $[value], e.g. $[0D] for CR or $[0D,0A] for CR/LF (since 5.0).
+|Result fieldname|Specify here the name of the result fieldname (STRING) added to the output stream of the pipeline. This field is populated by the output stream (stdout) of the process.
+|Error fieldname|Specify here the name of the error fieldname (STRING) added to the output stream of the pipeline. This field is filled by the error stream (stderr) of the process.
+|Exit value|Specify here the name of the exit fieldname (INTEGER) added to the output stream of the pipeline. This field is filled by the exit output of the process. 
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/execsqlrow.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/execsqlrow.adoc
new file mode 100644
index 0000000..276bcd9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/execsqlrow.adoc
@@ -0,0 +1,44 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/execsqlrow/src/main/doc/execsqlrow.adoc
+= Execute row SQL script
+
+== Description
+
+Execute row SQL script for every input-row to this transform. An input field can be used to specify the SQL to execute or it can specify a file that contains the SQL to execute.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; This name has to be unique in a single pipeline
+|Connection|Select a database connection to use
+|Commit|Number of rows to send before doing a database commit
+|SQL field name|The field that either contains the SQL to execute or optionally specifies the path to a file that contains SQL to execute
+|Read SQL from file|If checked, then the SQL field name option specifies a file that contains SQL to execute, otherwise the SQL field name option specifies the actual SQL to execute. (Available since 5.0)
+|Field to contain insert stats|Optional: If you want to get an additional field in the output stream with the number of records that were inserted, please define the field name here.
+|Field to contain update stats|Same as insert stats, but for updated rows.
+|Field to contain delete stats|Same as insert stats, but for deleted rows.
+|Field to contain read stats|Same as insert stats, but for read rows.
+|===
+
+== Notes
+
+Because of the scripting/dynamic nature of the transform, it does not use prepared statements and thus is not intended to work fast or optimal. For good performance turn to dedicated transforms like Table Output (insert into), Table Input (Select), Update, Delete, etc.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fake.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fake.adoc
new file mode 100644
index 0000000..738d10a
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fake.adoc
@@ -0,0 +1,399 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/fake/src/main/doc/fake.adoc
+= Fake data
+
+== Description
+
+This transform type allows you to generate fake data using the Java Faker library which can be found link:https://github.com/DiUS/java-faker[here] for full documentation. It can be used to generate pretty data for development, testing or showcasing a project.
+
+For instance we could generate some random Pokémon data.
+
+[width="90%", options="header"]
+|===
+|name|location
+|Krabby|Snowpoint City
+|Mankey|Sootopolis City
+|Grimer|Five Island
+|Drowzee|Five Island
+|Onix|Dendemille Town
+|===
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Locale|A locale can be used to make the generated data more specific to a single location.
+|Output field name|The name of the generated field.
+|Type of fake data|The data category.
+|Topic to generate|The topic within the data category to generate.
+|===
+
+== Types of categories
+
+[width="90%", options="header"]
+|===
+|Type|Topics
+|Address a|
+- streetName
+- streetAddressNumber
+- streetAddress
+- ...
+|Ancient a|
+- god
+- primordial
+- titan
+- hero
+|Animal a|
+- name
+|App a|
+- name
+- version
+- author
+|Aqua Teen Hunger Force a|
+- character
+|Artist a|
+- name
+|Avatar a|
+- image
+|Back To The Future a|
+- character
+- date
+- quote
+|Aviation a|
+- aircraft
+- airport
+- METAR
+|Basketball a|
+- teams
+- coaches
+- positions
+- players
+|Beer a|
+- name
+- style
+- hop
+- ...
+|Bojack Horseman a|
+- characters
+- quotes
+- tongueTwisters
+|Book a|
+- author
+- title
+- publisher
+- genre
+|Bool a|
+- bool
+|Business a|
+- creditCardNumber
+- creditCardType
+- creditCardExpiry
+|ChuckNorris a|
+- fact
+|Cat a|
+- name
+- breed
+- registry
+|Code a|
+- isbnGs1
+- isbnGroup
+- isbnRegistrant
+- ...
+|Coin a|
+- flip
+|Color a|
+- name
+- hex
+|Commerce a|
+- color
+- department
+- productName
+- ...
+|Company a|
+- name
+- suffix
+- industry
+- ...
+|Crypto a|
+- md5
+- sha1
+- sha256
+- sha512
+|DateAndTime a|
+- future
+- between
+- past
+- birthday
+|Demographic a|
+- race
+- educationalAttainment
+- demonym
+- sex
+- maritalStatus
+|Disease a|
+- internalDisease
+- neurology
+- surgery
+- ...
+|Dog a|
+- name
+- breed
+- sound
+- ...
+|DragonBall a|
+- character
+|Dune a|
+- character
+- title
+- planet
+- ...
+|Educator a|
+- university
+- course
+- secondarySchool
+- campus
+|Esports a|
+- player
+- team
+- event
+- ...
+|File a|
+- extension
+- mimeType
+- fileName
+|Finance a|
+- creditCard
+- bic
+- iban
+- calculateIbanChecksum
+- ...
+|Food a|
+- ingredient
+- spice
+- dish
+- ...
+|Friends a|
+- character
+- location
+- quote
+|FunnyName a|
+- name
+|GameOfThrones a|
+- character
+- house
+- city
+- ...
+|Hacker a|
+- abbreviation
+- adjective
+- noun
+- ...
+|HarryPotter a|
+- character
+- location
+- quote
+- ...
+|Hipster a|
+- word
+|HitchhikersGuideToTheGalaxy a|
+- character
+- location
+- marvinQuote
+- ...
+|Hobbit a|
+- character
+- thorinsCompany
+- quote
+- location
+|HowIMetYourMother a|
+- character
+- catchPhrase
+- highFive
+- quote
+|IdNumber a|
+- valid
+- invalid
+- ssnValid
+- ...
+|Internet a|
+- emailAddress
+- safeEmailAddress
+- domainName
+- ...
+|Job a|
+- field
+- seniority
+- position
+- keySkills
+- title
+|Kaamelott a|
+- character
+- quote
+|LeagueOfLegends a|
+- champion
+- location
+- quote
+- ...
+|Lebowski a|
+- actor
+- character
+- quote
+|LordOfTheRings a|
+- character
+- location
+|Lorem a|
+- character
+- characters
+- word
+- words
+- ...
+|Matz a|
+- quote
+|Music a|
+- instrument
+- key
+- chord
+- genre
+|Name a|
+- name
+- nameWithMiddle
+- fullName
+- ...
+|Nation a|
+- nationality
+- language
+- capitalCity
+- flag
+|Number a|
+- randomDigit
+- randomDigitNotZero
+- numberBetween
+- ...
+|Options a|
+- option
+- nextElement
+|Overwatch a|
+- hero
+- location
+- quote
+|PhoneNumber a|
+- cellPhone
+- phoneNumber
+- ...
+|Pokemon a|
+- name
+- location
+|Princess Bride a|
+- character
+- quote
+|Relationship Terms a|
+- direct
+- extended
+- inLaw
+- ...
+|RickAndMorty a|
+- character
+- location
+- quote
+|Robin a|
+- quote
+|RockBand a|
+- name
+|Shakespeare a|
+- hamletQuote
+- asYouLikeItQuote
+- kingRichardIIIQuote
+- ...
+|SlackEmoji a|
+- people
+- nature
+- foodAndDrink
+- ...
+|Space a|
+- planet
+- moon
+- galaxy
+- ...
+|StarCraft a|
+- unit
+- building
+- character
+- planet
+|StarTrek a|
+- character
+- location
+- specie
+- ...
+|Stock a|
+- nsdqSymbol
+- nyseSymbol
+|Superhero a|
+- name
+- prefix
+- suffix
+- ...
+|Team a|
+- name
+- creature
+- state
+- sport
+|TwinPeaks a|
+- character
+- location
+- quote
+|University a|
+- name
+- prefix
+- suffix
+|Weather a|
+- description
+- temperatureCelsius
+- temperatureFahrenheit
+- temperature
+|Witcher a|
+- character
+- witcher
+- school
+- ...
+|Yoda a|
+- quote
+|Zelda a|
+- game
+- character
+|===
+
+
+== Locales
+
+- bg
+- ca
+- ca-CAT
+- da-DK
+- de
+- de-AT
+- de-CH
+- en
+- ...
+- en-US
+- ...
+- nl
+- ...
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fieldschangesequence.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fieldschangesequence.adoc
new file mode 100644
index 0000000..90190a4
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fieldschangesequence.adoc
@@ -0,0 +1,34 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/fieldschangesequence/src/main/doc/fieldschangesequence.adoc
+= Add value fields changing sequence
+
+== Description
+
+This transform simply adds a sequence value which resets each time a value changes in the list of specified fields.
+
+== Options
+
+Here are the options for the transform:
+
+* transform name : the name of the transform, needs to be unique in a pipeline
+* Result field: the name of the output field, the sequence
+* Start at value: the number to start at each time
+* Increment by: the value to increase at each row in the same group
+* Init sequence if value of following fields change: you can specify a list of fields here.  If a value in one or more of these fields changes compared to the previous rows, the sequence will be reset to the start value.
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fieldsplitter.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fieldsplitter.adoc
new file mode 100644
index 0000000..2412bc9
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fieldsplitter.adoc
@@ -0,0 +1,83 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/fieldsplitter/src/main/doc/fieldsplitter.adoc
+= Split Fields
+
+== Description
+
+You can use the Split Fields transform to split a field into multiple fields based on a specified delimiter.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline
+|Field to split|The name of the field you want to split
+|Delimiter|Delimiter that determines the field. Special characters (e.g. CHAR ASCII HEX01) can be set with the format $[value], e.g. $[01] or $[6F,FF,00,1F].
+|Enclosure|You can specify an enclosure string which when placed around a value allows delimiters to be present in it.  For example with a comma (,) delimiter:  ```aaaa,"123,456",ccc``` can be resolved to 3 fields using enclosure ```"```.
+| Escape string|To include delimiter characters in values sometimes an escape string is used like backslash, double backslash and so on.
+For example with a comma (,) delimiter:  ```aaaa,123\\,456,ccc``` can be resolved to 3 fields using escape character ```\\```.
+|Fields table|This table is where you define the properties for each new field created by the split. For each new field, you must define the field name, data type, and other properties.
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
+
+== Examples
+
+Below are examples of split fields:
+
+=== Example 1
+
+SALES_VALUES field containing: "500,300,200,100"
+
+Use these settings to split the field into four new fields:
+
+* Delimiter: ,
+* Field: SALES1, SALES2, SALES3, SALES4
+* Id:
+* remove ID no, no, no, no
+* type: Number, Number, Number, Number
+* format: ###.##, ###.##, ###.##, ###.##
+* group:
+* decimal: .
+* currency:
+* length: 3, 3, 3, 3
+* precision: 0, 0, 0, 0
+
+=== Example 2
+
+SALES_VALUES field containing "Sales2=310.50, Sales4=150.23"
+
+Use these settings to split the field into four new fields:
+
+* Delimiter: ,
+* Field: SALES1, SALES2, SALES3, SALES4
+* Id: Sales1=, Sales2=, Sales3=, Sales4=
+* remove ID yes, yes, yes, yes
+* type: Number, Number, Number, Number
+* format: ###.##, ###.##, ###.##, ###.##
+* group:
+* decimal: .
+* currency:
+* length: 7, 7, 7, 7
+* precision: 2, 2, 2, 2
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fileexists.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fileexists.adoc
new file mode 100644
index 0000000..650f357
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fileexists.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/fileexists/src/main/doc/fileexists.adoc
+= File exists
+
+== Description
+
+This transform verifies the existence of a file where the filename comes from previous transforms.
+
+The result is a boolean flag field that gets added to the input fields in the output.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; This name has to be unique in a single pipeline
+|Filename field|The input field that will contain the filename at runtime
+|Result fieldname|The name of the field that will contain the boolean flag.
+|Add filename to result|Enable this option if you want to add the filename to the list of filenames that can be used in the next workflow action.
+|Include file type|Include the file type in a field.
+|File type field|The name of the field that will contain the file type as a String: "file", "folder", "imaginary"
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filelocked.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filelocked.adoc
new file mode 100644
index 0000000..d352957
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filelocked.adoc
@@ -0,0 +1,32 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/filelocked/src/main/doc/filelocked.adoc
+= Check if file is locked
+
+== Description
+
+This transform tries to determine if a file is locked by another process.  It does this by trying to move it to itself.
+
+== Options
+
+
+* transform name: the transform name, unique in a pipeline
+* Filename field: the name of the input field that will contain the names of the files during execution.
+* Result fieldname: the name of the Boolean output field that will contain true or false depending on whether or not the file is locked.
+* Add filename to result: check this if you want to add the checked file-names to the list of files in the pipelines result.
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filesfromresult.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filesfromresult.adoc
new file mode 100644
index 0000000..23caaaf
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filesfromresult.adoc
@@ -0,0 +1,47 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/filesfromresult/src/main/doc/filesfromresult.adoc
+= Files from result
+
+== Description
+
+Every time a file gets processed, used or created in a pipeline or a workflow, the details of the file, the workflow action, the transform, etc. is captured and added to an internal result set when the option 'Add file names to result' is set, e.g. in a Text File Output transform. You can access this file information using this transform.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|The unique transform name within the pipeline 
+|===
+
+
+== Output fields
+
+[width="90%", options="header"]
+|===
+|Field name|Type|Example
+|type|String|Normal, Log, Error, Error-line, etc.
+|filename|String|somefile.txt
+|path|String|C:\Foo\Bar\somefile.txt
+|parentorigin|String|Process files pipeline
+|origin|String|Text File Input
+|comment|String|Read by text file input
+|timestamp|Date|2006-06-23 12:34:56 
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filestoresult.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filestoresult.adoc
new file mode 100644
index 0000000..97069b0
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filestoresult.adoc
@@ -0,0 +1,34 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/filestoresult/src/main/doc/filestoresult.adoc
+= Files to result
+
+== Description
+
+This transform allows you to set filenames in the internal result files of a pipeline. Subsequent workflow actions can then use this information as it routes the list of files to the result files stream e.g. by using the Get files from result transform or some workflow actions that can process on the list of result files. For example, the Mail workflow action can use this list of files to attach to a mail, so perhaps you don't want all files sent, but only a certain selection. For this,  [...]
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|The name of this transform as it appears in the pipeline workspace.
+|Filename field|Field that contains the filenames of the files to copy.
+|Type of file to|Select the type of file to set in results. 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filterrows.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filterrows.adoc
new file mode 100644
index 0000000..42e8f3e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/filterrows.adoc
@@ -0,0 +1,87 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/filterrows/src/main/doc/filterrows.adoc
+= Filter Rows
+
+== Description
+
+The Filter Rows transform allows you to filter rows based on conditions and comparisons. Once this transform is connected to a previous transform (one or more and receiving input), you can click on the "<field>", "=" and "<value>" areas to construct a condition.
+
+To enter an IN LIST operator, use a string value separated by semicolons. This also works on numeric values like integers. The list of values must be entered with a string type, e.g.: 2;3;7;8
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|Optionally, you can change the name of this transform to fit your needs.
+|Send 'true' data to transform|The rows for which the condition specified is true are sent to this transform
+|Send 'false' data to transform|The rows for which the condition specified is false are sent to this transform
+|The Condition|
+|Add Condition| Click to add conditions. Add condition converts the original condition into a sub-level condition. Click a sub-condition to edit it by going down one level in the condition tree. 
+|===
+
+== Filtering
+
+=== Filtering rows based on values from variables
+
+The filter rows transform detects only fields in the input stream. If you want to filter rows based on a variable value, you must modify the previous transform (a table input for example) and include the variable as another field, for example:
+
+[source,bash]
+----
+${myvar}=5
+----
+
+A query:
+
+[source,sql]
+----
+SELECT field1,
+field2,
+${myvar} AS field3 
+FROM table
+WHERE field1=xxxx
+----
+
+Then in the filter row condition, you can have
+
+[source,bash]
+----
+field1 = field3
+----
+
+*FYI* - instead of the Table Input trick above you can use the simple "Get Variables" transform to set parameters in fields.
+
+=== Filtering special characters
+
+To filter special characters like explicit EOF (e.g. from old cobol files), use a REGEX expression in the "filter row" transform with the syntax "\x{1A}", where \x means HEX representation and 1A in parentheses is the EOF char to match in HEX.
+
+== Metadata injection support 
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
+
+== Special considerations for the condition field
+
+The Filter Rows transform is a special MDI scenario, since it has a nested structure of filter conditions. The condition is given in XML format. The condition XML has the same format as we store the pipeline metadata in a .HPL file in XML format. We do not have a DTD (Document Type Definition) for the .HPL XML format, nor the condition.
+
+It is easy to get to an XML condition:
+
+1. Create a sample Filter transform with the different conditions you need. This sample transform gives you all the information, such as the values for the functions you use.
+2. Select the transform, copy it to the clipboard, and then paste it into a text editor. Alternatively, you can store the .HPL, and then open the .HPL in a text editor.
+3. Find the <condition> element and its nested elements and modify it accordingly to use it in your MDI scenario.
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/flattener.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/flattener.adoc
new file mode 100644
index 0000000..e7806e4
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/flattener.adoc
@@ -0,0 +1,54 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/flattener/src/main/doc/flattener.adoc
+= Row Flattener
+
+== Description
+
+The Row Flattener transform allows you to flatten data sequentially.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|Name of the transform; this name has to be unique in a single pipeline
+|The field to flatten|The field that must be flattened into different target fields
+|Target fields|The name of the target field to which the field is flattened
+|===
+
+== Example
+
+In the sample below, if you begin with the following data set...
+[width="90%", options="header"]
+|===
+|Field1|Field2|Field3|Flatten
+|A|B|C|One
+|Z|Y|X|Two
+|D|E|F|Three
+|W|V|U|Four
+|===
+
+The data set can be flattened to the example shown below:
+[width="90%", options="header"]
+|===
+|Field1|Field2|Field3|Target1|Target2
+|A|B|C|One|Two
+|D|E|F|Three|Four
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fuzzymatch.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fuzzymatch.adoc
new file mode 100644
index 0000000..964b0f1
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/fuzzymatch.adoc
@@ -0,0 +1,69 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/fuzzymatch/src/main/doc/fuzzymatch.adoc
+= Fuzzy match
+
+== Description
+
+The Fuzzy Match transform finds strings that potentially match using duplicate-detecting algorithms that calculate the similarity of two streams of data. This transform returns matching values as a separated list as specified by user-defined minimal or maximal values.
+
+== Options
+
+=== General tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|Name of this transform as it appears in the pipeline workspace
+|Lookup transform|Identifies the transform that contains the fields to match
+|Lookup field|Identifies the field to match
+|Main stream field|Identifies the primary stream to match the Lookup field with
+|Algorithm|Identifies which string-matching algorithm to use---options include Levenshtein, Damerau-Levenshtein, Needleman Wunsch, Jaro, Jaro Winkler, Pair letters similarity, Metaphone, Double Metaphone, SoundEx, or Refined SoundEx
+|Case sensitive|Identifies if streams can or cannot differ based on the use of uppercase and lowercase letters---only for use with the Levenshtein algorithms
+|Get closer value|When checked, returns a single result with the highest similarity score---when unchecked, returns all matches that satisfy the minimal and maximal value setting as a separated list, separated by the values separator
+|Minimum value|Identifies the lowest possible similarity score
+|Maximal value|Identifies the highest possible similarity score
+|Values separator|Identifies the string that separates the matches. Only available for specific algorithms and when the Get closer value option is unchecked.
+|===
+
+*Algorithm Definitions*
+
+Within the Algorithm field, there are several options available to compare and match strings.
+
+* Levenshtein and Damerau-Levenshtein---calculate the distance between two strings by looking at how many edit transforms are needed to get from one string to another. The former only looks at inserts, deletes, and replacements. The latter adds transposition. The score indicates the minimum number of changes needed. For instance, the difference between John and Jan would be two; to turn the name John into Jan you need one transform to replace the O with an A, and another transform to del [...]
+* Needleman Wunsch---calculates the similarity of two sequences and is mainly used in bioinformatics. The algorithm calculates a gap penalty. The aforementioned example would have a score of negative two.
+* Jaro and Jaro Winkler---calculate a similarity index between two strings. The result is a fraction between zero, indicating no similarity, and one, indicating an identical match.
+* Pair letters similarity---dissects the two strings in pairs and calculates the similarity of the two strings by dividing the number of common pairs by the sum of the pairs from both strings.
+* Metaphone, Double Metaphone, SoundEx, and Refined SoundEx---are phonetic algorithms, which try to match strings based on how they would sound. Each is based on the English language and would not be useful to compare other languages.
+  ** The Metaphone algorithm returns an encoded value based on the English pronunciation of a given word. The encoded value of the names John and Jan would return the value JN for both names.
+  ** The Double Metaphone algorithm has fundamental design improvements over its predecessor and uses a more complex ruleset for coding. It can return a primary and a secondary encoded value for a string. The names John and Jan each return metaphone key values of JN and AN.
+  ** The Soundex algorithm returns a single encoded value for a name that consists of a letter followed by three numerical digits. The letter is the first letter of the name, and the digits encode the remaining consonants.
+  ** The Refined SoundEx algorithm is an improvement over its predecessor. Encoded values for this algorithm are six digits long, the initial character is encoded, and multiple possible encodings can be returned for a single name. Using this algorithm, the name John returns the values 160000 and 460000, as does the name Jan.
+
+=== Fields tab
+
+The Fields tab enables you to define how to return the results of a comparison.
+[width="90%", options="header"]
+|===
+|Option|Description
+|Match field|Defines the name of the column that contains the comparison value
+|Value field|Defines the similarity score for which to return a value 
+|===
+
+You can also specify the list of additional fields to retrieve from the lookup stream.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getfilenames.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getfilenames.adoc
new file mode 100644
index 0000000..dd81587
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getfilenames.adoc
@@ -0,0 +1,62 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/getfilenames/src/main/doc/getfilenames.adoc
+= Get filenames
+
+== Description
+
+The Get File Names transform allows you to get information associated with file names on the file system. The retrieved file names are added as rows onto the stream.
+
+The output fields for this transform are:
+
+* filename - the complete filename, including the path (/tmp/hop/somefile.txt)
+* short_filename - only the filename, without the path (somefile.txt)
+* path - only the path (/tmp/hop/)
+* type
+* exists
+* ishidden
+* isreadable
+* iswriteable
+* lastmodifiedtime
+* size
+* extension
+* uri
+* rooturi
+
+== File tab
+
+This tab defines the location of the files you want to retrieve filenames for. For more information about specifying file locations, see section "Selecting file using Regular Expressions" on the Text File Input transform.
+
+Example: You have a static directory of c:\temp where you expect files with an extension of .dat to be placed. Under file/directory you would specify c:\temp  and under Wildcard you would have a RegEx with something like .*\.dat$
+
+
+== Filters
+
+The filters tab allows you to filter the retrieved file names based on:
+
+* All files and folders
+* Files only
+* Folders only
+
+It also gives you:
+
+* The ability to include a row number in the output
+* The ability to limit the number of rows returned
+* The ability to add the filename(s) to the result list
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getfilesrowcount.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getfilesrowcount.adoc
new file mode 100644
index 0000000..429e015
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getfilesrowcount.adoc
@@ -0,0 +1,63 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/getfilesrowcount/src/main/doc/getfilesrowcount.adoc
+= Get files rowcount
+
+== Description
+
+This transform counts the number of rows in a file or set of files.
+
+== Options
+
+=== File tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|directory|
+|Expression|
+|File or|
+|Filename(s)|
+|Files|
+|Preview|
+|Regular|
+|Rows|
+|Selected|
+|Show|
+|transform name|
+|===
+
+=== Content tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Rows Count|
+|fieldname|
+|Rows|
+|Separator|
+|type|
+|Row|
+|separator|
+|Include|
+|files count|
+|in output?|
+|Files Count|
+|fieldname|
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getslavesequence.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getslavesequence.adoc
new file mode 100644
index 0000000..32afbd7
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getslavesequence.adoc
@@ -0,0 +1,134 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/getslavesequence/src/main/doc/getslavesequence.adoc
+= Get ID from slave server
+
+== Description
+
+This transform gets a unique ID from the hop server of your choice.
+
+The transform works by asking a hop server for a range of values.  In other words, the transform reserves a range of IDs for a certain given central hop server sequence.  It then increments by one until there are no more values in the range, asks for another range, and so forth.
+
+This means that the returned IDs can be sequential, however this is not guaranteed.  The only thing you know is that after each execution for each input row you get a unique ID that has a higher value than the last one returned.
+
+The last value returned + the size of the range (increment) are stored in a database table to persist the value over several runs of a pipeline. 
+
+== Context
+
+When processing large amounts of data on multiple distinct systems (for example when running a clustered pipeline) you sometimes still need to generate a unique numeric ID for each one. Since a GUID is not guaranteed to be unique and consumes a lot more memory and space compared to a numeric integer (long) ID, you may prefer to use numerical IDs.
+
+A unique ID identifies a piece of information, which is useful for: looking up data, support cases, incremental processing, error handling (recovering from where you left off by looking up the last inserted ID) and so on. Typically you would use a database to generate such an ID, for example using an Oracle sequence. However, there are cases where you don't have a database available (such as when you add a unique ID to a text/log file), when the database doesn't support sequences (column [...]
+
+Assuming you have (or are now configuring) Hop Server slave servers set up in Hop, this transform retrieves a unique ID from the Hop Server slave server of your choice. It works by asking a slave server for a range of values. In other words, the transform reserves a range of IDs for a certain given central slave server sequence. It then increments by one until there are no more values in the range, asks another range, and so forth. This means that the returned IDs can be sequential, howe [...]
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform name|The name of this transform as it appears in the pipeline workspace.
+|Name of value|The name of the (Integer type) output field (sequence or ID)
+|Slave server|The hop server to get the unique ID range from. This can be specified using a variable
+|Sequence name|The name of the sequence as defined on the hop server (see below). The sequence name can be specified using a variable
+|Increment or batch name|The number of IDs to generate before a new value (range) is obtained from the hop server. The higher you make the increment size, the faster the transform can run. However, it will deplete the available IDs (1x10^15) faster. For example, if you take 1,000,000,000 as the increment, you can reserve 1,000,000 times a range of IDs. If you only use up a few of those ID each time a pipeline runs, you will run out of IDs anyway after 1M executions. Don't make this value [...]
+|===
+
+== Slave server configuration
+
+You need to start your hop server with extra connection and sequence information configured in an XML file. The extra connection and sequences blocks are what make the sequences work in the following example:
+
+[source,xml]
+----
+<hop-server-config>
+ 
+  <hop-server>
+    <name>master1</name>
+    <hostname>localhost</hostname>
+    <port>8282</port>
+    <master>Y</master>
+  </hop-server>
+ 
+  <connection>
+    <name>MySQL</name>
+    <server>localhost</server>
+    <type>MYSQL</type>
+    <access>Native</access>
+    <database>test</database>
+    <port>3306</port>
+    <username>matt</username>
+    <password>Encrypted 2be98afc86aa7f2e4cb79ce10df90acde</password>
+  </connection>
+ 
+  <sequences>
+ 
+   <sequence>
+    <name>test</name>
+    <start>0</start>
+    <connection>MySQL</connection>
+    <schema/>
+    <table>SEQ_TABLE</table>
+    <sequence_field>SEQ_NAME</sequence_field>
+    <value_field>SEQ_VALUE</value_field>
+   </sequence>
+ 
+  </sequences>
+----
+
+The start tag is optional and will default to 0 if you leave it out of the definition. You can define as many sequences as you like.
+
+== Servlet information
+
+Once the configuration files are changed as shown above, slave servers receive a new servlet that can be called as follows (authentication required): http://hostname:port/hop/nextSequence/?name=SequenceName&increment=NumberOfIDsToReserve
+In case no increment is specified, 10000 IDs are reserved, for example: http://localhost:8282/hop/nextSequence/?name=test
+The servlet will return a simple piece of XML containing both the start of the range as well as the number of IDs reserved, or the increment:
+
+====
+<seq><value>570000</value><increment>10000</increment></seq>
+====
+
+Continuing with this example, the following row will be present in the SEQ_TABLE table:
+====
+mysql> select * from SEQ_TABLE where SEQ_NAME='test';
+====
+
+[width="90%", options="header"]
+|===
+|SEQ_NAME|SEQ_VALUE
+|test|580000 
+|===
+
+== Automatic loading and creation
+
+It can be a burden to maintain all your sequences in an XML file. Because of this, it is also possible to automatically load all the sequences from a database table. You can use the following construct to do it:
+
+[source,xml]
+----
+<autosequence>
+    <connection>MySQL</connection>
+    <schema/>
+    <start>1234</start>
+    <table>SEQ_TABLE</table>
+    <sequence_field>SEQ_NAME</sequence_field>
+    <value_field>SEQ_VALUE</value_field>
+ 
+    <autocreate>N</autocreate>
+</autosequence>
+----
+
+The <autocreate> tag allows any sequence name to be specified in the transform without error. In that case, the sequence with the name specified will be created automatically with the start value from the <autosequence> specification.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getsubfolders.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getsubfolders.adoc
new file mode 100644
index 0000000..a385dfc
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getsubfolders.adoc
@@ -0,0 +1,53 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/getsubfolders/src/main/doc/getsubfolders.adoc
+
+= Get SubFolder names
+
+== Description
+
+The Get Subfolder Names transform gets subfolders from a directory.
+
+== Options
+
+=== Folder tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of this transform as it appears in the pipeline workspace
+|Accept foldername from field?|Allows a foldername to be passed as a field.
+|Foldername field|The field which contains the folder name.
+|Selected directories|The directories from which to get the subfolders.
+|Browse|Uses the local file browser to get a path.
+|Add|Adds the path defined in the Directory field.
+|Delete|Deletes the path
+|Edit|Change the path
+|===
+
+
+=== Settings tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Include rownum in output?|Allows the row number to be added to the output.
+|Rownum fieldname|The field which contains the row number.
+|Limit|Limits the output rows.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/gettablenames.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/gettablenames.adoc
new file mode 100644
index 0000000..05cb892
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/gettablenames.adoc
@@ -0,0 +1,51 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/gettablenames/src/main/doc/gettablenames.adoc
+
+= Get table names
+
+== Description
+
+This transform gets the table names from a database connection.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of this transform as it appears in the pipeline workspace
+|Connection|The connection to use
+|Schema name|The schema to use
+|Get schema from field|Allows passing a field containing the schema name
+|Schema name field|The field containing the schema name
+|Include catalogs|Includes catalogs in the output
+|Include schemas|Includes schemas in the output
+|Include views|Includes views in the output
+|Include procedures|Includes procedures in the output
+|Include synonyms|Include synonyms in the output
+|Add schema in object name|Adds the schema to the object name
+|Tablename fieldname|Output field containing the table name
+|Object type fieldname|Output field containing the object type (catalog, schema, table, ...)
+|Is system object fieldname|Output field containing boolean: is object a system object
+|Creation SQL fieldname|Output field containing create statement for object
+|===
+
+== Metadata injection
+
+All fields of this transform support metadata injection. You can use this transform with Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getvariable.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getvariable.adoc
new file mode 100644
index 0000000..6050642
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getvariable.adoc
@@ -0,0 +1,52 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/getvariable/src/main/doc/getvariable.adoc
+= Get variables
+
+== Description
+
+This transform allows you to get the value of a variable. This transform can return rows or add values to input rows.
+
+For example, you can specify: ${java.io.tmpdir}/hop/tempfile.txt and it will be expanded to /tmp/hop/tempfile.txt on Unix-like systems.
+
+See also the Set Variables transform.
+To convert the Variable into a data type other than String use Select Values - Meta Data tab.
+To get system values, including command line arguments, use the Get System Info transform.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform Name|The name of this transform as it appears in the pipeline workspace.
+|Name|Name of the field.
+|Variable|Allows you to enter variables as complete strings to return rows or add values to input rows. For example, you can specify: ${java.io.tmpdir}/hop/tempfile.txt and it will be expanded to /tmp/hop/tempfile.txt on Unix-like systems.
+|Type|Specifies the field type: String, Date, Number, Boolean, Integer, BigNumber, Serializable, or Binary.
+|Format|Allows you to specify the format of the field after the type has been determined.
+|Length|For Number: Total number of significant figures in a number; For String: total length of string; For Date: length of printed output of the string (for example, entering 4 would only return the year).
+|Precision|For Number: Number of floating point digits. Not used for String, Date, or Boolean.
+|Currency|Used to interpret numbers with currency symbols. For example, $10,000.00 or E5.000,00.
+|Decimal|Used to indicate whether to use a period (".") or comma (",") for number values.
+|Group|Used to indicate whether to use a period (".") or comma (",") for number values.
+|TrimType|Type trim this field before processing: select either none, left, right, or both (left and right). 
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getxmldata.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getxmldata.adoc
new file mode 100644
index 0000000..aa95497
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/getxmldata.adoc
@@ -0,0 +1,96 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/xml/src/main/doc/getxmldata.adoc
+= Get Data From XML
+
+== Description
+
+The Get Data From XML transform provides the ability to read data from any type of XML file using XPath specifications.
+
+"Get Data From XML" can read data from 3 kind of sources (files, stream and url) in 2 modes (user can define files and urls at static mode or in a dynamic way).
+
+== Options
+
+=== Files Tab
+
+The files tab is where you define the location of the XML files from which you want to read. The table below contains options associated with the Files tab.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|XML Source from field a|
+* XML source is defined in a field : the previous transform is giving XML data in a certain field in the input stream.
+* XML source is a filename : the previous transform is giving filenames in a certain field in the input stream.  These are read.
+* Read source as URL : the  previous transform is giving URLs in a certain field in the input stream.  These are read.
+* Get XML source from a field : specify the field to read XML, filename or URL from.
+|File or directory|Specifies the location and/or name of the input text file. Note: Click Add to add the file/directory/wildcard combination to the list of selected files (grid) below.
+|Regular expression|Specifies the regular expression you want to use to select the files in the directory specified in the previous option.
+|Selected Files|Contains a list of selected files (or wildcard selections) and a property specifying if file is required or not. If a file is required and it is not found, an error is generated; otherwise, the file name is skipped.
+|Show filename(s)...|Displays a list of all files that will be loaded based on the current selected file definitions 
+|===
+
+=== Content Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Settings a|
+* Loop XPath : For every "Loop XPath" location we find in the XML file(s), we will output one row of data.  This is the main specification we use to flatten the XML file(s).  You can use the "Get XPath nodes" button to search for the possible repeating nodes in the XML document.  Please note that if the XML document is large that this can take a while.
+* Encoding : the XML file encoding in case none is specified in the XML documents. (yes, those still exist)
+* Namespace aware : check this to make the XML document namespace aware.
+* Ignore comments : Ignore all comments in the XML document while parsing.
+* Validate XML : Validate the XML prior to parsing. Use a token when you want to replace a value dynamically in an XPath field value. A token is between @_ and - (@_fieldname-). Please see Example 1 to see how it works.
+* Use token : a token is not related to XML parsing but to Hop.
+* Ignore empty file : an empty file is not a valid XML document.  Check this if you want to ignore those altogether.
+* Do not raise an error if no file: Don't raise a stink if no files are found.
+* Limit : Limits the number of rows to this number (zero (0) means all rows).
+* Prune path to handle large files: almost the same value as the "Loop XPath" property with some exceptions, see Get Data from XML - Handling Large Files for more details. Note that you can use this parameter to avoid multiple HTTP URL requests.
+
+|Additional fields a|
+
+* Include filename in output? : Allows you to specify a field name to include the file name (String) in the output of this transform.
+* Rownum in output? : Allows you to specify a field name to include the row number (Integer) in the output of this transform.
+
+|Add to result filename a|
+* Add files to result filename : Adds the XML filenames read to the result of this pipeline.  A unique list is being kept in memory that can be used in the next workflow action in a workflow, for example in another pipeline.
+|===
+
+=== Fields Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|The name of the output field
+|XPath|The path to the element node or attribute to read
+|Element|The element type to read: Node or Attribute
+|Type|The data type to convert to
+|Format|The format or conversion mask to use in the data type conversion
+|Length|The length of the output data type
+|Precision|The precision of the output data type
+|Currency|The currency symbol to use during data type conversion
+|Decimal|The numeric decimal symbol to use during data type conversion
+|Group|The numeric grouping symbol to use during data type conversion
+|Trim type|The type of trimming to use during data type conversion
+|Repeat|Repeat the column value of the previous row if the column value is empty (null) 
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/groupby.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/groupby.adoc
new file mode 100644
index 0000000..750837a
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/groupby.adoc
@@ -0,0 +1,65 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/groupby/src/main/doc/groupby.adoc
+= Group By
+
+== Description
+
+This transform groups rows from a source, based on a specified field or collection of fields. A new row is generated for each group. It can also generate one or more aggregate values for the groups. Common uses are calculating the average sales per product and counting the number of an item you have in stock.
+
+The Group By transform is designed for sorted inputs. If your input is not sorted, only double consecutive rows are grouped correctly. If you sort the data outside of Hop, the case sensitivity of the data in the fields may produce unexpected grouping results.
+
+You can use the Memory Group By transform to handle non-sorted input.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Include all rows?|Enable if you want all rows in the output, not just the aggregation; to differentiate between the two types of rows in the output, a flag is required in the output. You must specify the name of the flag field in that case (the type is boolean).
+|Temporary files directory|The directory in which the temporary files are stored (needed when the Include all rows option is enabled and the number of grouped rows exceeds 5000 rows); the default is the standard temporary directory for the system
+|TMP-file prefix|Specify the file prefix used when naming temporary files
+|Add line number, restart in each group|Enable to add a line number that restarts at 1 in each group
+|Line number field name|The name of the output field that contains the line number
+|Always give back a row|If you enable this option, the Group By transform will always give back a result row, even if there is no input row. 
+This can be useful if you want to count the number of rows.  Without this option you would never get a count of zero (0).
+|Group fields table|Specify the fields over which you want to group. Click Get Fields to add all fields from the input stream(s).
+|Aggregates table a|Specify the fields that must be aggregated, the method and the name of the resulting new field.
+Here are the available aggregation methods :
+
+- Sum
+- Average (Mean)
+- Median
+- Percentile
+- Minimum
+- Maximum
+- Number of values (N)
+- Concatenate strings separated by , (comma)
+- First non-null value
+- Last non-null value
+- First value (including null)
+- Last value (including null)
+- Cumulative sum (all rows option only!)
+- Cumulative average (all rows option only!)
+- Standard deviation
+- Concatenate strings separated by <Value>: specify the separator in the Value column
+- Number of distinct values 
+- Number of rows (without field argument)
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/http.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/http.adoc
new file mode 100644
index 0000000..e780daf
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/http.adoc
@@ -0,0 +1,67 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/http/src/main/doc/http.adoc
+= HTTP client
+
+== Description
+
+The HTTP client transform performs a simple call to a base URL with options appended as shown below:
+
+====
+http://<URL>?param1=value1&amp;param2=value2&amp;param3..
+====
+
+The result is stored in a String field with the specified name. 
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline
+|URL|The base URL string
+|Accept URL from field?|Enable this option if you want to get the URL from a previous transform.
+Enabling this will also allow you to specify the name of the input field.
+|URL field name|The name of the incoming field that contains the URL
+|Connection timeout|
+|Socket timeout|The number of seconds to wait if no data is returned from the server.
+|Connection close wait time|
+|Result fieldname|The name of the field to store results
+|HTTP status code field name|The name of the field to store the HTTP response code (e.g. 200, 404)
+|Response time (milliseconds) field name|The name of the field to store the response time
+|Http Login|The username to be passed during HTTP (Basic) authentication
+|HTTP Password|The password to be passed during HTTP (Basic) authentication
+|Proxy Host|The hostname of the Proxy Server to be used
+|Proxy Port|The port number of the Proxy Server to be used
+|Parameters|Area where you define the parameter name-value pairs to pass on the URL
+|Custom HTTP Headers|Area where you define optional HTTP headers 
+|===
+
+== FAQ
+=== The HTTP client transform doesn't do anything
+
+*Q*: The HTTP client transform doesn't do anything, how do I make it work?
+
+*A*: The HTTP client transform needs to be triggered. Use a Row generator transform generating e.g. 1 empty row and link that with a hop to the HTTP client transform.
+
+=== The HTTP client transform and SOAP
+
+*Q*: Does the HTTP client support SOAP?
+
+*A*: No, it just calls a URL with arguments. Future transforms may provide SOAP functionality. Work is underway on a WebService transform supporting WSDL.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/httppost.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/httppost.adoc
new file mode 100644
index 0000000..f5ea3dd
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/httppost.adoc
@@ -0,0 +1,78 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/httppost/src/main/doc/httppost.adoc
+= HTTP Post
+
+== Description
+
+This transform uses an HTTP POST command to submit form data via a URL.
+
+== Options
+
+=== General Tab
+
+The General tab defines the Web service URL to submit to, and optionally which field contains the URL.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|URL|The Web service URL to submit to.
+|Accept URL from field?|If checked, you must specify which field to retrieve the URL from.
+|URL field name|If the previous option is checked, this is where you specify the URL field.
+|Encoding|The encoding standard for the files being accessed.
+|Request entity field|The name of the field that will contain the POST request. When enabled, the Post a file option will retrieve the file named in this field, and post the contents of that file.
+|Post a file|If a file is defined in the Request entity field, its contents will be posted if this option is checked. Currently "Request entity field" must be filled in order for "Post a file" to work. Selecting "Post a file" and specifying a field under "Body parameters" without selecting a value for "Request entity field" (the place for the file name) will fail silently.
+|Connection timeout|Defines the timeout (defaults to 10000) in Milliseconds when a connection attempt will error out.
+|Socket timeout|Defines the timeout (defaults to 10000) in Milliseconds when a socket will error out.
+|Connection close wait time|Define the wait time after the connection is closed in Milliseconds, the default -1 means the default wait time from the operating system (often 2 minutes).
+Background information: Each row opens a connection and keeps it in a so called TIME-WAIT state for a specific time. A lot (may be thousands) of connections in a TIME-WAIT state introduce significant memory overhead. This option can reduce this memory overhead by reducing the time to keep a closed connection in a TIME-WAIT state.
+|Result fieldname|The field that you want to post the result output to.
+|HTTP status code fieldname|The field that you want to post the status code output to.
+|Response time (milliseconds) fieldname|The field that you want to post the response time, in milliseconds, to.
+|HTTP login|If this form requires authentication, this field should contain the username.
+|HTTP password|If this form requires authentication, this field should contain the password that corresponds with the username.
+|Proxy host|Hostname or IP address of the proxy server, if you use one.
+|Proxy port|Port number of the proxy server, if you use one. 
+|===
+
+=== Fields tab: Body (Header) Parameters
+
+The Fields tab defines parameters for the HTTP request header and body. If you've filled in the URL and other necessary details in the General tab, you can use the Get values buttons to pre-populate the fields here. Body parameters are used in POST and PUT operations.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|#|The order that this parameter will be passed to the Web application.
+|Name|The name of the field that contains the value to map to the parameter.
+|Parameter|The parameter to map the value of Name to.
+|Put in Header?|If set to Y, the parameter will be put into the request header. 
+|===
+
+=== Fields tab: Query Parameters
+
+The Fields tab defines parameters for the HTTP request header and body. If you've filled in the URL and other necessary details in the General tab, you can use the Get values buttons to pre-populate the fields here. Query parameters are specified in the URL and can be used in any HTTP method.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|#|The order that this parameter will be passed to the Web application.
+|Name|The name of the field that contains the value to map to the parameter.
+|Value|The value to map to the parameter. 
+|===
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ifnull.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ifnull.adoc
new file mode 100644
index 0000000..83d02fc
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ifnull.adoc
@@ -0,0 +1,33 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/ifnull/src/main/doc/ifnull.adoc
+= If Null
+
+
+== Description
+
+The transform "If field value is null" is able to replace nulls with a given value, either by:
+
+1. Processing the complete row with all fields
+2. Processing the complete row but only for specific field types (Number, String, Date etc.)
+3. Processing the complete row but only for specific fields by name
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/injector.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/injector.adoc
new file mode 100644
index 0000000..04a00d8
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/injector.adoc
@@ -0,0 +1,36 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/injector/src/main/doc/injector.adoc
+= Injector
+
+== Description
+
+Injector was created for those people that are developing special purpose pipelines and want to 'inject' rows into the pipeline using the Hop API and Java. Among other things you can build 'headless' pipelines with it: pipelines that have no input at design time: do not read from file or database.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Fieldname|Specify the field name of the rows to inject.
+|Type|Specify the type of data.
+|Length|For Number: Total number of significant figures in a number; For String: total length of string; For Date: length of printed output of the string.
+|Precision|For Number: Number of floating point digits; For String, Date, Boolean: unused. 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/insertupdate.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/insertupdate.adoc
new file mode 100644
index 0000000..dc44322
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/insertupdate.adoc
@@ -0,0 +1,40 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/insertupdate/src/main/doc/insertupdate.adoc
+= Insert / Update
+
+== Description
+
+The Insert/Update transform first looks up a row in a table using one or more lookup keys. If the row can't be found, it inserts the row. If it can be found and the fields to update are the same, nothing is done. If they are not all the same, the row in the table is updated.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Commit size|The number of rows to change (insert / update) before running a commit.
+|Connection|The database connection to which data is written
+|Don't perform any updates|If enabled, the values in the database are never updated; only inserts are performed.
+|Key Lookup table|Allows you to specify a list of field values and comparators. You can use the following comparators: =, = ~NULL, <>, <, <=, >, >=, LIKE, BETWEEN, IS NULL, IS NOT NULL
+|SQL button|Click SQL to generate the SQL to create the table and indexes for correct operation.
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Target schema|The name of the Schema for the table to which data is written. This is important for data sources that allow for table names with periods in them.
+|Target table|Name of the table in which you want to do the insert or update.
+|Update Fields|Allows you to specify all fields in the table you want to insert/update including the keys. Avoid updates on certain fields by specifying N in the update column.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/janino.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/janino.adoc
new file mode 100644
index 0000000..3594857
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/janino.adoc
@@ -0,0 +1,159 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/janino/src/main/doc/janino.adoc
+= User Defined Java Expression
+
+== Description
+
+This transform allows you to enter User Defined Java Expressions as a basis for the calculation of new values.
+
+If you have a Java expression like :
+[source,java]
+----
+C=A+B
+----
+
+Then you can simply enter the right side of the expression in the dialog:
+[source,java]
+----
+A+B
+----
+
+The values are exposed to the expressions as the Java objects they are :
+
+[width="90%", options="header"]
+|===
+|Data type|Java Class
+|BigNumber|BigDecimal
+|Binary|byte[]
+|Date|java.util.Date
+|Integer|java.lang.Long
+|Number|java.lang.Double
+|String|java.lang.String 
+|===
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|New Field|The new field in the data stream. If you want to overwrite an existing field, you need to define the field here and in the "Replace value" option.
+|Java Expression|The Java Expression, see examples below.
+|Value Type|Type
+|Length|Length
+|Precision|Precision
+|Replace value|Enable this option and set the "New Field" name to that of an existing field when you want to replace the existing field's value.
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
+
+== Examples
+
+**Add 2 integers, A and B**
+
+[source,java]
+----
+A+B
+----
+
+**Concatenate 2 Strings : firstname and name and put a space in between**
+
+[source,java]
+----
+firstname+" "+name
+----
+
+or if you really care about performance, this might be faster: 
+
+[source,java]
+----
+new StringBuffer(firstname).append(" ").append(name).toString()
+----
+
+**Use native Java and API functions**
+
+[source,java]
+----
+System.getProperty("os.name")
+----
+
+**Business rules (If / Then / Else)**
+
+[source,java]
+----
+a<c?true:false
+----
+
+This can be more complicated
+
+[source,java]
+----
+a<c?(a==1?1:2):3
+----
+
+even with OR and AND and other operators and functions
+
+**Using Constants**
+
+If you use a constant, you may need to define the right type in some expressions otherwise it could throw:
+
+Incompatible expression types "int" and "java.lang.Long"
+
+To solve this, use:
+
+[source,java]
+----
+test == null ? new Long(0) : test
+----
+
+In this case, it checks if test is null and replaces with zero. If it is not null, it will return test.
+
+**Cut a string from end and test for null and minimal length**
+
+Imagine you have input strings with
+
+    Orlando FL
+    New York NY
+
+and you want to separate the state and city, you could use the following expressions:
+
+For state (get the last 2 characters):
+
+[source,java]
+----
+location != null && location.length()>2 ? location.substring(location.length()-2, location.length()) : null
+----
+
+For city (get the beginning without the last 2 characters and trim):
+
+[source,java]
+----
+location != null && location.length()>2 ? location.substring(0, location.length()-2).trim() : location
+----
+
+**Functionality of a LIKE operator (contains string)  and replacing values**
+
+The following example returns 1 when abc is within the source string, otherwise 2. It also returns 2 when the source string is null. The return values could be of value type Integer.
+
+[source,java]
+----
+samplestr !=null && samplestr.indexOf("abc")>-1 ? 1 : 2
+----
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/javafilter.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/javafilter.adoc
new file mode 100644
index 0000000..2433c5f
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/javafilter.adoc
@@ -0,0 +1,71 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/javafilter/src/main/doc/javafilter.adoc
+
+= Java Filter
+
+== Description
+
+The Java Filter transform allows the stream to be filtered using user defined Java expressions. The input stream, coming from one or more transforms, can be redirected to two different transforms based on the evaluation of the written expression.
+
+In other words, the user is able to perform an if-statement to filter the data stream with pure java expressions:
+
+[source,java]
+----
+if( Condition )
+  {matching-transform}
+else
+  {non-matching transform}
+----
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Destination transform for matching rows (optional)|The rows for which the written condition is evaluated to true are sent to this transform.
+|Destination transform for non-matching rows (optional)|The rows for which the written condition is evaluated to false are sent to this transform.
+|Condition (Java Expression)|Defines the Java condition on which to filter the data. See examples below.
+|===
+
+=== Examples
+
+These code samples apply to the Condition (Java Expression) field.
+
+Filters a string that contains white space
+
+[source,java]
+----
+field.contains(" ");
+----
+
+Filters a string that is identical to a constant string
+
+[source,java]
+----
+field.equals("Positive");
+----
+
+Filters a boolean value
+
+[source,java]
+----
+field == Boolean.TRUE
+----
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/javascript.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/javascript.adoc
new file mode 100644
index 0000000..ab8e01c
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/javascript.adoc
@@ -0,0 +1,42 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/javascript/src/main/doc/javascript.adoc
+
+= JavaScript
+
+== Description
+
+The JavaScript transform allows you to create JavaScript scripts for each transform.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Scripts|Displays a list of scripts you have created in this transform.
+|Constants|A list of pre-defined, static constants.
+|Functions|Contains a variety of String, Numeric, Date, Logic and specialized functions you can use to create your script. To add a function to your script, simply double-click on the function or drag it to the location in your script that you wish to insert it.
+|Input Fields|A list of inputs coming into the transform. Double-click or use drag and drop to insert the field into your script.
+|Output Fields|A list of outputs for the transform.
+|Java Script|This section is where you edit the script for this transform. You can insert functions, constants, input fields, etc. from the tree control on the left by double-clicking on the node you wish to insert or by dragging the object onto the Java Script panel.
+|Fields|The Fields table contains a list of variables from your script including the ability to add metadata like a descriptive name.
+|Get Variables|Retrieves a list of variables from your script.
+|Test script|Use this button to test the syntax of your script.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/joinrows.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/joinrows.adoc
new file mode 100644
index 0000000..031b39e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/joinrows.adoc
@@ -0,0 +1,42 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/joinrows/src/main/doc/joinrows.adoc
+
+= Join Rows
+
+== Description
+
+The Join rows transform allows you to produce combinations (Cartesian product) of all rows in the input streams.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Temp directory|Specify the name of the directory where the system stores temporary files in case you want to combine more than the cached number of rows.
+|TMP-file prefix|This is the prefix of the temporary files that will be generated.
+|Max. cache size|The number of rows to cache before the system reads data from temporary files; required when you want to combine large row sets that do not fit into memory.
+|Main transform to read from|Specifies the transform from which to read most of the data; while the data from other transforms are cached or spooled to disk, the data from this transform is not.
+|The Condition(s)|You can enter a complex condition to limit the number of output rows.
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/jsoninput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/jsoninput.adoc
new file mode 100644
index 0000000..8de4e76
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/jsoninput.adoc
@@ -0,0 +1,125 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/jsoninput/src/main/doc/jsoninput.adoc
+= JSON Input
+
+== Description
+
+This transform reads data from JSON structures, files, or incoming fields using a JSONPath expression to extract data and output rows. JSONPath expressions can use either dot notation or square bracket notation.
+
+== Options
+
+=== General Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Source is from a previous transform a|Select to retrieve the source from a previously defined field. When selected, the following fields are available:
+
+* Select field
+* Use field as file names
+* Read source as URL
+* Do not pass field downstream
+
+When this option is cleared, the following fields are available:
+
+* File or directory
+* Regular Expression
+* Exclude Regular Expression
+* Selected files
+
+|Select field|Specify the field name to use as a source from a previous transform.
+|Use field as file names|Select to indicate the source is a filename.
+|Read source as URL|Select to indicate if the source should be accessed as a URL.
+|Do not pass field downstream|Select to remove the source field from the output stream. This action improves performance and memory utilization with large JSON fields.
+|File or directory|Specify the source location if the source is not defined in a field. Click Browse to navigate to your source file or directory. Click Add to include the source in the Selected files table.
+|Regular expression|Specify a regular expression to match filenames within a specified directory.
+|Exclude regular expression|Specify a regular expression to exclude filenames within a specified directory.
+|File/Directory|The source location indicated by clicking Add after specifying it in File or directory.
+|Wildcard (RegExp)|Wildcards as specified in Regular expression.
+|Exclude wildcard|Excluded wildcards as specified in Exclude regular expression.
+|Required|Required source location for input.
+|Include subfolders|Whether subfolders are included within the source location.
+|Delete|Remove a source from the table
+|Edit|Remove a source from the table and return it back to the File or directory option.
+|Show filename(s)|Display the file names of the sources successfully connected to the JSON Input transform.
+|===
+
+=== Content Tab
+
+The Content tab contains the following options for configuring which data to retrieve:
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Ignore empty file|Select to skip empty files. When cleared, empty files will cause the process to fail and stop.
+|Do not raise an error if no files|Select to continue when no files are available to process.
+|Ignore missing path|Select to continue processing files when an error occurs that (1) no fields match the JSON path or (2) that all the values are null. When cleared, no further rows are processed when an error occurs.
+|Default path leaf to null|Select to return a null value for missing paths.
+|Limit|Specify a limit on the number of records generated from the transform. Results are not limited when set to zero.
+|Include filename in output|Select to add a string field with the filename in the result.
+|Rownum in output|Select to add an integer field with the row number in the result.
+|Add filenames to result|Select to add processed files to the result file list.
+|===
+
+=== Fields Tab
+
+The Fields tab displays field definitions to extract values from the JSON structure. The table in this tab contains the following columns:
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|Name of field that maps to the corresponding field in the JSON input stream.
+|Path|Complete path of the field name in the JSON input stream. All records can be retrieved by adding the asterisk * in the path. For example, $.mydata.*
+|Type|Data type of the input field.
+|Format|An optional mask for converting the format of the original field. See Common Formats for information on common valid date and numeric formats you can use in this transform.
+|Length|Length of the field.
+|Precision|Number of floating point digits for number-type fields.
+|Currency|Currency symbol ($ or €, for example).
+|Decimal|A decimal point can be a . (5,000.00 for example) or , (5.000,00 for example).
+|Group|A grouping can be a , (10,000.00 for example) or . (5.000,00 for example).
+|Trim type|The trim method to apply to a string.
+|Repeat|The corresponding value from the last row repeated if a row is empty.
+|Get fields|Populate the table with fields derived from the source file.
+|===
+
+=== Select fields
+
+Click Get Fields in the Fields tab to open the Select Fields window. Select the checkbox next to each field in your source file that you want include in your output. All the fields selected in this transform are added to the table. You can search for a field name by entering the field name in the Search box.
+
+=== Additional output fields tab
+
+The Additional output fields tab contains the following options to specify additional information about the file to process:
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Short filename field|Specify the field that contains the filename without path information but with an extension.
+|Extension field|Specify the field that contains the extension of the filename.
+|Path field|Specify the field that contains the path in operating system format.
+|Size field|Specify the field that contains the size of the data.
+|Is hidden field|Specify the field indicating if the file is hidden or not (Boolean).
+|Last modification field|Specify the field indicating the date of the last time the file was modified.
+|Uri field|Specify the field that contains the URI.
+|Root uri field|Specify the field that contains only the root part of the URI.
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/jsonoutput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/jsonoutput.adoc
new file mode 100644
index 0000000..5775e38
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/jsonoutput.adoc
@@ -0,0 +1,201 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/jsonoutput/src/main/doc/jsonoutput.adoc
+= JSON Output
+
+== Description
+
+The JSON Output transform allows you to generate JSON blocks based on input transform values. The output JSON will be available as a JavaScript array or JavaScript object depending on the transform settings.
+
+
+== Options
+
+=== General Tab
+
+The General tab allows you to specify the type of transform operation, the output JSON structure and the transform output file. This file will be used to dump all generated JSON.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Operation a|Specify the transform operation type. Currently 3 types of operation are available:
+
+   1. Output value - only pass the output JSON as a transform output field, do not dump to the output file
+   2. Write to file - only write to file, do not pass to the output field
+   3. Output value and write to file - dump to the file and pass the generated JSON as a transform output field
+
+|Json block name|This value will be used as a name for json block. Can be empty string that will affect output json structure, see detailed description below.
+|Nr. rows in a block|Number of JSON block key-value pairs.
+NOTE: 1 is a special value; in case of 1, every output will be generated as one object. See the description below.
+|Output value|This value will be used as a transform output field. Will contain generated json output block depending on transform settings.
+|Compatibility mode|Changes the default fixed mode json structure 
+|===
+
+**Compatibility mode**
+
+By default this transform uses the fixed structure mode, consider the Json Output transform has the following settings:
+
+* Json block name = "data"
+* Nr rows in block = 3
+* Compatibility mode = NOT checked (and this is the default option)
+
+This will output:
+
+[source, json]
+----
+{
+  "data" : [ {
+    "name" : "item",
+    "value" : 25
+  }, {
+    "name" : "item",
+    "value" : 25
+  }, {
+    "name" : "item",
+    "value" : 25
+  } ]
+}{
+  "data" : [ {
+    "name" : "item",
+    "value" : 25
+  } ]
+}
+----
+
+If compatibility mode is enabled and the transform has the following settings:
+
+* Json block name = "data"
+* Nr rows in block = 3
+* 'Compatibility mode' is checked
+
+This will output:
+
+[source, json]
+----
+{"data":[{"name":"item"},{"value":25},{"name":"item"},{"value":25},{"name":"item"},{"value":25}]}
+{"data":[{"name":"item"},{"value":25}]}
+----
+
+Pretty formatting does not affect compatibility mode. We have 2 output JSON objects. The first object harvests the first 3 input rows and the second object harvests only one row. This happens because the number of rows in a block is 3. This can be considered an incorrect result, as the real object count in the array is 6 for the first output object. By default compatibility mode is disabled.
+
+If 'Json block name' is an empty string (by default it has the value 'data'), compatibility mode will use an empty string for the block name. Normally, if compatibility mode is not checked, the transform output will be:
+
+[source,json]
+----
+[ {
+  "name" : "item",
+  "value" : 25
+}, {
+  "name" : "item",
+  "value" : 25
+}, {
+  "name" : "item",
+  "value" : 25
+} ][ {
+  "name" : "item",
+  "value" : 25
+} ]
+----
+
+We will have just 4 simple JSON objects that will be output as 4 transform output rows.
+
+In case the JSON block name is defined, the output structure will look like:
+
+[source, json]
+----
+{
+  "data" : {
+    "name" : "item",
+    "value" : 25
+  }
+}{
+  "data" : {
+    "name" : "item",
+    "value" : 25
+  }
+}{
+  "data" : {
+    "name" : "item",
+    "value" : 25
+  }
+}{
+  "data" : {
+    "name" : "item",
+    "value" : 25
+  }
+}
+----
+
+So these will be the same 4 output objects, with the JSON block name defined.
+
+If 'Nr. rows in a block' is less than 1, the output will be one object:
+
+[source, json]
+----
+{
+  "data" : [ {
+    "name" : "item",
+    "value" : 25
+  }, {
+    "name" : "item",
+    "value" : 25
+  }, {
+    "name" : "item",
+    "value" : 25
+  }, {
+    "name" : "item",
+    "value" : 25
+  } ]
+}
+----
+
+This will be one object (one output row) with a data block containing a JSON array with 4 objects (as we had 4 input data rows). Please note: when 'Nr. rows in a block' is 0, the transform will keep building the output object as long as input data is available. When the input is done, one big output object will be passed to the output row. For large input data this can impact memory usage.
+
+=== Output File
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Filename|full path to output file
+|Append|If not checked, a new file will be created every time the transform runs. If a file with the specified name already exists, it will be replaced by a new one. If checked, the new JSON output will be appended at the end of the existing file. If the file does not exist, it will be created as in the previous case.
+|Create Parent folder|Usually the file name contains a path with a parent folder. If the parent folder does not exist and this option is checked, the parent folder will be created as a new folder. Otherwise the file will not be found and the transform will fail.
+|Do not open create at start|If not checked, the file (and in some cases the parent folder) will be created/opened for writing during pipeline initialization. If checked, the file and parent folder will be created only after the transform receives its first input data.
+|Extension|Output file extension. Default value is 'js'
+|Encoding|Output file encoding
+|Pass output to servlet|Enable this option to return the data via a web service instead of writing into a file.
+|Include date in filename?|If checked, the output file name will contain the file name value + the current date. This may help to generate unique output files.
+|Include time in filename|If checked, the output file name will contain the file creation time, as for the 'Include date in filename' option.
+|Show filename(s) button|Can be useful to test the full output file path.
+|Add file to result filenames?|If checked, the created output file path will be accessible from the transform result.
+|===
+
+=== Fields Tab
+
+This tab is used to map input transform fields to output json values
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Element name|Json element name as a key. For example "A":"B" - A is a element name, B is actual input value mapped for this Element name.
+|Fieldname|Input transform field name. Use 'Get Fields' button to discover available input fields 
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ldapinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ldapinput.adoc
new file mode 100644
index 0000000..5fed97d
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ldapinput.adoc
@@ -0,0 +1,93 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/ldapinput/src/main/doc/ldapinput.adoc
+= LDAP Input
+
+== Description
+
+The LDAP Input transform allows you to read information like users, roles and other data from an LDAP server. The following sections describe the available options for the LDAP input transform.
+
+
+== General Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Host|Host can be the IP address of the LDAP directory server or its DNS name.
+|Port|Port number of LDAP directory server.
+|Use authentication|To enable LDAP authentication
+|Username|The username to be passed during authentication.
+|Password|The password to be passed during authentication.
+|Use certificate|To enable the use of a certificate.
+|Trust store path|
+|Trust store password|
+|Trust all certificates|
+|Test connection|Tests the configured connection to the LDAP server.
+|===
+
+== Search Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Dynamic search base|Enables the search base field.
+|Search base fieldname|The field containing the base LDAP node.
+|Search base|Base LDAP node to search the LDAP contents.
+|Dynamic filter string|Enables the filter string field.
+|Filter string fieldname|The field containing the filter string.
+|Filter String|LDAP filter to search the customized contents; in the current stage only "single filter format" is supported.
+ Example: mail=*
+|===
+
+== Advanced Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Include rownum in output?|Adds a row number to the output.
+|Rownum fieldname|The field containing the row number.
+|Limit|Limit the number of results returned.
+|Time limit|Limit the amount of time given to return results.
+|Multi valued field separator.|
+|Set paging|
+|Page size|
+|Search scope|Object scope, One level scope, Subtree scope
+|===
+
+== Fields Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|The name of the imported field. Note: If you renamed a field in the Select & Alter tab, the name here must match the renamed value.
+|Attribute|
+|Fetch as|String, Binary
+|Is sorted Key?| Y/N
+|Type|The data type for this field.
+|Format|The format mask (number type or date format)
+|Length|The field length.
+|Precision|The precision option depends on the field type, but only Number is supported; it returns the number of floating point digits.
+|Currency|Symbol used to represent currencies
+|Decimal|A decimal point; this is either a dot or a comma
+|Group|A method of separating units of thousands in numbers of four digits or larger. This is either a dot or a comma.
+|Trim type|Where to trim the field, left, right, both, none
+|Repeat|If LDAP returns no value for an attribute, use the value from the previous row
+|Get fields|Gets the result fields
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ldapoutput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ldapoutput.adoc
new file mode 100644
index 0000000..94b14cb
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/ldapoutput.adoc
@@ -0,0 +1,88 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/ldapoutput/src/main/doc/ldapoutput.adoc
+= LDAP Output
+
+== Description
+
+The LDAP Output transform allows you to write information like users, roles and other data to an LDAP server. The following sections describe the available options for the LDAP Output transform.
+
+
+== General Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Host|Host can be the IP address of the LDAP directory server or its DNS name.
+|Port|Port number of LDAP directory server.
+|Referral a|Determines how to handle referrals.
+
+* Ignore: Ignore referrals.
+* Follow: 	Automatically follow any referrals.
+
+|Derefalias a|Determines how aliases are dereferenced.
+
+* Always: Always dereference aliases.
+* Never: Never dereference aliases.
+* Finding: Dereference aliases only during name resolution.
+* Searching: Dereference aliases only after name resolution.
+
+|Protocol a|Determines which protocol is used.
+
+* LDAP
+* LDAP SSL
+* LDAP TLS
+
+|Use authentication|To enable LDAP authentication.
+|Username|The username to be passed during authentication.
+|Password|The password to be passed during authentication.
+|Use certificate|To enable the use of a certificate.
+|Trust store path|
+|Trust store password|
+|Trust all certificates|
+|Test connection|Tests the configured connection to the LDAP server.
+|===
+
+== Settings
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Operation|Insert, Update, Upsert, Add attribute, Delete, Rename DN (distinguished name).
+|Multi valued field separator|
+|Fail if not exist|
+|Dn fieldname|Field which contains the distinguished name.
+|Old DN fieldname|Field which contains the old distinguished name (rename).
+|New DN fieldname|Field which contains the new distinguished name (rename).
+|Delete RDN|Whether or not to delete the RDN (relative distinguished name)
+|===
+
+== Fields Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Search base for fields|Where to start looking for fields
+|Attributes|
+|Stream field|
+|Update|Whether or not to update the fields (Y/N)
+|Get fields|
+|Edit mapping|
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/loadfileinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/loadfileinput.adoc
new file mode 100644
index 0000000..123d62c
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/loadfileinput.adoc
@@ -0,0 +1,100 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/loadfileinput/src/main/doc/loadfileinput.adoc
+= Load file content in memory
+
+== Description
+
+Loads the file content in memory.
+
+== Options
+
+=== File Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Filename is defined in a field|Takes a filename from a field
+|get filename from a field|A field containing the filename
+|File or directory|The file or directory path to be added
+|Add|Adds the file or directory to the selected files
+|Browse|Uses the local file browser to select a path
+|Regular Expression|Allows files to be included based on a regular expression
+|Exclude Regular Expression|Allows files to be excluded based on a regular expression
+|Selected files|The selected files to load into memory
+|Show filename(s)|Previews the selected files
+|===
+
+=== Content Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Encoding|The file encoding (UTF, ISO, ...)
+|Ignore empty file|Ignores empty files
+|Ignore missing path|Ignores missing paths
+|Limit|Limit the rows loaded from the file
+|Include filename in output?|Allows the filename to be included in the output
+|Filename fieldname|Field containing the filename
+|Rownum in output?|Allows the row number to be included in the output
+|Rownum filename|Field containing the row number
+|Add files to result filesname|Add files to resulting files name
+|===
+
+=== Fields Tab
+
+The fields to load from the file.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Name|The name of the imported field.
+|Element|File content or size
+|Type|The data type for this field
+|Format|The format mask
+|Length|The field length
+|Precision|The precision option depends on the field type, but only Number is supported; it returns the number of floating point digits
+|Currency|Symbol used to represent currencies
+|Decimal|A decimal point; this is either a dot or a comma
+|Group|A method of separating units of thousands in numbers of four digits or larger. This is either a dot or a comma.
+|Trim type|Type of trim: none, left, right, both
+|Repeat|Enter ‘Y’ if you want to reuse the value from the last non-empty row when the corresponding value in this row is empty. Enter ‘N’ to leave the row empty.
+|Get fields|Retrieve available fields based on the file contents
+|===
+
+=== Additional output Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Short filename field|The field name that contains the filename without path information but with an extension.
+|Extension field|The field name that contains the extension of the filename.
+|Path field|The field name that contains the path in operating system format.
+|Is hidden field|The field name that contains if the file is hidden or not (boolean).
+|Last modification field|The field name that contains the last modification.
+|Uri field|The field name that contains the URI.
+|Root uri field|The field name that contains only the root part of the URI.
+|===
+
+=== Buttons
+[width="90%", options="header"]
+|===
+|Option|Description
+|Preview rows|Preview the rows generated by this transform.
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mail.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mail.adoc
new file mode 100644
index 0000000..0391bbd
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mail.adoc
@@ -0,0 +1,116 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mail/src/main/doc/mail.adoc
+= Mail
+
+== Description
+
+This transform uses an SMTP server to send an email containing data from the previous transform.
+
+== Options
+
+=== Addresses
+
+This tab defines the sender, contact person, and recipients of a Hop-generated email.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Destination address|The destination for the email. This can be a single address, a space-separated list of addresses , or an email alias for a distribution list
+|Cc|An email address, space-separated list of email addresses, or a distribution list to send a carbon copy of the email to.
+|Bcc|An email address, space-separated list of email addresses, or a distribution list to send a blind carbon copy of the email to.
+|Sender name|The name of the person you want the email to be from.
+|Sender Address|The email address of the person or account you want the email to be from.
+|Reply to|The email address that recipients will use if they reply to the email.
+|Contact|The name of the person to contact regarding the email's contents.
+|Contact phone|The phone number of the contact person defined in the previous field. 
+|===
+
+=== Server
+
+This tab contains details for your SMTP server, including authentication and encryption.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|SMTP server|URL, hostname, or IP address of your SMTP server.
+|Port|Port number for your SMTP service.
+|Use authentication|If checked, you will be able to enter an SMTP username and password in the next few fields.
+|Authentication user|The SMTP username to use for server authentication.
+|Authentication password|The password for the previously defined SMTP username.
+|Use secure authentication|If checked you will be able to specify SSL or TLS encryption in the next field.
+|Secure connection type|Determines whether the server will use SSL or TLS encryption protocols. 
+|===
+
+=== Email Message
+
+This tab determines the text content of the email.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Include date in message?|If checked, the date will be printed in the email body.
+|Only send comment in mail body|If checked, information about the pipeline will not be included.
+|Use HTML format in mail body?|If checked, this email will be in HTML format instead of plain text.
+|Encoding|Character encoding for the text of an HTML email.
+|Manage priority|If checked, enables the following two fields to set email priority and importance levels.
+|Priority|The priority level to assign in the email metadata.
+|Importance|The importance level to assign in the email metadata.
+|Sensitivity|This allows to set the "Sensitivity" header information to Normal, Personal, Private, Confidential.
+|Subject|The email subject line.
+|Comment|The email body. See also the option "Attach content file". 
+|===
+
+=== Attached Files
+
+This tab contains options for file attachments.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Attach content file?|If checked, you will use the next two fields to define which stream fields you want to use to create dynamic content of the mail message.
+|Content fieldname|The given field contains the field that points to a file that is loaded for the content. Internally this file content is put into is MimeBodyPart (using application/x-any).
+|Filename fieldname|This field sets the filename within the MimeBodyPart.
+|Dynamic filenames?|If checked, you will use the next two fields to define which stream fields you want to use to create dynamic filenames for your attachments.
+|Filename field|The stream field you want to use for dynamic filenames of attachments. This can also be a folder name, in which case you would use the Wildcard field to determine filenames.
+|Wildcard field|A regular expression that creates dynamic filenames for attachments.
+|Filename/foldername|A static name and location of a file to attach.
+|Include subfolders|If checked, will attach files in subfolders of the specified folder.
+|Wildcard|A regular expression that identifies a file to attach.
+|Zip files|If checked, multiple file attachments will be zipped into a single archive before attaching to the email.
+|Is zip filename dynamic?|If checked, the name of the zip archive will be determined by a data stream.
+|Zipfilename field|The data field to use for the name of the zip archive.
+|Zip filename|A static name for the zip archive.
+|Zip files if size greater than|Only archives file attachments if their combined size is above this number (in bytes). 
+|===
+
+=== Embedded Images
+
+This tab contains options for embedded images in HTML emails.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Filename|The name and location of the file you want to embed in the email.
+|Content ID|A unique identifier for this file. Hop will generate one if you don't specify one yourself.
+|#|The order that the attachment will be processed.
+|Image|The name of an added image.
+|Content ID (field)|The content ID of an added image. 
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mailinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mailinput.adoc
new file mode 100644
index 0000000..024fd1e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mailinput.adoc
@@ -0,0 +1,93 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mailinput/src/main/doc/mailinput.adoc
+= Email Messages Input
+
+== Description
+
+This transform allows you to retrieve messages and their attachments from a mail server using the POP3, IMAP or MBOX standard protocols.
+
+== Options
+
+=== General
+
+On this tab you will find the general mail server connection settings:
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Source host|The mail server host
+|Use SSL?|Check this option if your server needs an SSL connection
+|Username|The username of the mailbox to connect to
+|Password|The password to use
+|Use proxy?|Check this option if you want to connect through a proxy.
+|Proxy username|The username to connect to the proxy with
+|Fetch in batches?|Enable this option if you want to retrieve larger volumes of mails in batches
+|Ignore errors reading fields|Sometimes a server doesn't support the retrieval of a particular piece of information. Enable this option to ignore these errors.
+|Protocol|Specify either standard protocol POP3, IMAP or MBOX to retrieve the mails
+|Test Connection button|This will simply try to connect to the mail server with the specified settings. This button does not retrieve mails. 
+|===
+
+=== Settings
+
+**POP3 settings**
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Retrieve|Specify either "All emails" or "Retrieve first... emails"
+|Retrieve the ... first emails|Allows you to specify how many mails you want to retrieve at most in one go  
+|===
+
+**IMAP settings**
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Get folder from field|Enable this option to allow the IMAP folder name to be present in an input field of this transform.
+|Folder field|The folder field in case the option above is enabled.
+|IMAP folder|The IMAP folder to retrieve mails from
+|Test folder... button|Use this to test if the folder is valid
+|Open button|use this to select the IMAP folder
+|Include subfolders|Check this option to also retrieve mails from sub-folders
+|Retrieve|Specify which mails to retrieve: Get all, new, old, read, unread, flagged, not flagged, draft, not draft, answered or not answered messages.
+|Retrieve the first ... emails|Specify how many emails to retrieve at most 
+|===
+
+**Batch settings**
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Batch size|Number of emails to retrieve at once in one batch
+|Start at message number|The first message number to start retrieving at
+|End at message number|The last message number to end retrieving with 
+|===
+
+
+=== Filters
+
+These are the filters you can set on the header of the mail message. Mail for which the header matches your filter will be retrieved, the rest is ignored.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Sender (FROM)|Only retrieve emails matching this sender.
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mailvalidator.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mailvalidator.adoc
new file mode 100644
index 0000000..67bb5ad
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mailvalidator.adoc
@@ -0,0 +1,47 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mailvalidator/src/main/doc/mailvalidator.adoc
+= Mail Validator
+
+== Description
+
+This transform checks if an email address is valid or not. The checking process returns one result field (Boolean or String depending on your settings) and one additional field (String) containing an error string when the email address is not valid.
+
+This transform must receive input stream and add result fields in this input stream.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|email fieldname|Specify the name of the field that contains the email addresses to check. This field must be defined in a previous transform. Dropdown the combo to select fieldname.
+|SMTP check?|By default, Hop will only check the email address structure (using a regular expression). If you want to perform an SMTP check, please select this option and fill in the Email sender (see below). Hop will extract the domain name from the supplied email address and will try to get all exchangers for the domain name. Each exchanger will be queried.
+|Time out|In order to perform a SMTP check, Hop will open a socket on the target SMTP host. Specify here the socket time out (by default 0).
+|Email sender|If you select "SMTP check?" option, this field is mandatory. Hop will need sender email address to query SMTP host.
+|Default SMTP server|If you know which SMTP server to query, please specify it here, Hop will then query only this one.
+|dynamic default SMTP?|If you want to pass the default SMTP server in a dynamic way, check this option.
+|Default SMTP field|If you select the previous option, you must fill this field. This field must be defined in a previous transform. Dropdown the combo to select fieldname.
+|Result fieldname|Hop will store the result of the process in this field. The result will be Boolean (TRUE = the email address is valid, FALSE = the email address is invalid) if the "Result is a string" option is unchecked (see below).
+Note: This field is mandatory and will be added to the input stream.
+|Result is a string|This option will turn the output field into a String; when the email address is valid, the output will contain the "Email is valid" field (see below), otherwise it will contain the "Email is not valid" field (see below).
+|Email is valid|If you selected the previous option, you must fill this field
+|Email is not valid|If you selected the previous option, you must fill this field
+|Errors field|When an email address is invalid, Hop returns the reason. If you want to add it to the input stream, please give the field a name, otherwise leave this field blank.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/memgroupby.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/memgroupby.adoc
new file mode 100644
index 0000000..6cbb748
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/memgroupby.adoc
@@ -0,0 +1,41 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/memgroupby/src/main/doc/memgroupby.adoc
+
+= Memory Group By
+
+== Description
+
+The Memory Group By transform builds aggregates in a group by fashion and does not require a sorted input since it processes all rows within memory. When the number of rows is too large to fit into memory, you need to use the combination of the Sort rows and Group by transforms.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Always give back a result row|If you enable this option, the Group By transform will always give back a result row, even if there is no input row. 
+This can be useful if you want to count the number of rows. Without this option you would never get a count of zero (0).
+|The fields that make up the group|After retrieving fields using the Get Fields button, designate the fields to include in the group. See the Group By transform for more details.
+|Aggregates|After retrieving fields using the Get lookup fields button, designate the aggregates to compute. See the Group By transform for more details.
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mergejoin.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mergejoin.adoc
new file mode 100644
index 0000000..3567aab
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mergejoin.adoc
@@ -0,0 +1,44 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mergejoin/src/main/doc/mergejoin.adoc
+= Merge Join
+
+== Description
+
+The Merge Join transform performs a classic merge join between data sets with data coming from two different input transforms. Join options include INNER, LEFT OUTER, RIGHT OUTER, and FULL OUTER.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|First transform|The first input transform to the merge join.
+The data coming from this transform must be sorted on the specified key fields.
+|Second transform|The second input transform to the merge join.
+The data coming from this transform must be sorted on the specified key fields.
+|Join Type|The type of join to perform: INNER, LEFT OUTER, RIGHT OUTER, or FULL OUTER.
+|Keys for 1st transform|The key field(s) of the first transform on which to join.
+|Keys for 2nd transform|The key field(s) of the second transform on which to join.
+Click Get key fields to retrieve the available field names.
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mergerows.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mergerows.adoc
new file mode 100644
index 0000000..2b7b25b
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mergerows.adoc
@@ -0,0 +1,57 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mergerows/src/main/doc/mergerows.adoc
+= Merge rows (diff)
+
+== Description
+
+The Merge rows (diff) transform compares and merges data within two rows of data. This transform is useful for comparing data collected at two different times. For example, the source system of your data warehouse might not contain a timestamp of the last data update. You could use this transform to compare the two data streams and merge the dates and timestamps in the rows.
+
+Based on keys for comparison, this transform merges reference rows (previous data) with compare rows (new data) and creates merged output rows. A flag in the row indicates how the values were compared and merged. Flag values include:
+
+* **identical**: The key was found in both rows, and the compared values are identical.
+
+* **changed**: The key was found in both rows, but one or more compared values are different.
+
+* **new**: The key was not found in the reference rows.
+
+* **deleted**: The key was not found in the compare rows.
+
+If the row's flag is **identical** or **deleted**, the merged output rows are based on the reference rows.
+
+For **new** or **changed** rows, the merged output rows are based on the compare rows.
+
+You can also send values from the merged and flagged rows to a subsequent transform in your pipeline, such as the Switch-Case transform or the Synchronize after merge transform. In the subsequent transform, you can use the flag field generated by **Merge rows (diff)** to control updates/inserts/deletes on a target table.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Reference rows origin|Specify the transform origin for the reference rows (the stream with the original rows, or the rows you want to compare the new rows to).
+|Compare rows origin|Specify the transform origin for the compare rows (the stream with the new rows).
+|Flag fieldname|Specify the name of the flag field on the output stream.
+|Keys to match|Specify fields containing the keys on which to match; click Get key fields to insert all of the fields originating from the reference rows transform.
+|Values to compare|Specify fields containing the values to compare; click Get value fields to insert all of the fields from the originating value rows transform. Key fields do not need to be specified here.
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/metainject.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/metainject.adoc
new file mode 100644
index 0000000..4d927af
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/metainject.adoc
@@ -0,0 +1,100 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/metainject/src/main/doc/metainject.adoc
+= Metadata Injection
+
+== Description
+
+Metadata injection inserts data from various sources into a pipeline at runtime. This insertion reduces repetitive ETL tasks.
+
+For example, you might have a simple pipeline to load transaction data values from a supplier, filter specific values, and output them to a file. If you have more than one supplier, you would need to run this simple pipeline for each supplier. Yet, with metadata injection, you can expand this simple repetitive pipeline by inserting metadata from another pipeline that contains the ETL Metadata Injection transform. This transform coordinates the data values from the various inputs through  [...]
+
+The repetitive pipeline is known as the template pipeline. The template pipeline is called by the ETL Metadata Injection transform. You will create a pipeline to prepare what common values you want to use as metadata and inject these specific values through the ETL Metadata Injection transform.
+
+We recommend the following basic procedure for using this transform to inject metadata:
+
+1. Optimize your data for injection, such as preparing folder structures and inputs.
+
+2. Develop pipelines for the repetitive process (the template pipeline), for metadata injection through the ETL Metadata Injection transform, and for handling multiple inputs.
+
+
+The metadata is injected into the template pipeline through any transform that supports metadata injection.
+
+== Options
+
+=== General
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Pipeline|Specify your template pipeline by entering in its path. Click Browse to display and enter the path details using the Virtual File System Browser.
+
+If you select a pipeline that has the same root path as the current pipeline, the variable ${Internal.Transform.Current.Directory} will automatically be inserted in place of the common root path. For example, if the current pipeline's path is /home/admin/pipeline.hpl and you select a pipeline in the folder /home/admin/path/sub.hpl then the path will automatically be converted to ${Internal.Transform.Current.Directory}/path/sub.hpl.
+|===
+
+The ETL Metadata Injection transform features the two tabs with fields. Each tab is described below.
+
+=== Inject Metadata Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Target injection transform key| Lists the available fields in each transform of the template pipeline that can be injected with metadata.
+|Target description|Describes how the target fields relate to their target transforms.
+|Source transform|Lists the transform associated with the fields to be injected into the target fields as metadata.
+|Source field|Lists the fields to be injected into the target fields as metadata.
+|===
+
+To specify the source field as metadata to be injected, perform the following steps:
+
+1. In the Target injection transform key column, double-click the field for which you want to specify a source field. The Source field dialog box opens.
+
+2. Select a source field and click OK.
+
+3. Optionally, select Use constant value to specify a constant value for the injected metadata through one of the following actions:
+  - Manually entering a value.
+  - Using an internal variable to set the value (${Internal.transform.Unique.Count} for example).
+  - Using a combination of manually specified values and parameter values (${FILE_PREFIX}_${FILE_DATE}.txt for example).
+
+==== Injecting Metadata into the ETL Metadata Injection transform
+
+For injecting metadata into the ETL Metadata Injection transform itself, the following exceptions apply:
+
+
+- To inject a method for how to specify a field (such as by FILENAME), set a PIPELINE_SPECIFICATION_METHOD constant to the field of an input transform. You can then map the field as a source to the PIPELINE_SPECIFICATION_METHOD constant in the ETL Metadata Injection transform.
+
+- The target field for the ETL Metadata Injection transform inserting the metadata into the original injection is defined by [GROUP NAME].[FIELD NAME]. For example, if the GROUP NAME is 'OUTPUT_FIELDS' and the FIELD NAME is 'OUTPUT_FIELDNAME', you would set the target field to 'OUTPUT_FIELDS.OUTPUT_FIELDNAME'.
+
+=== Options Tab
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|transform to read from (optional)|Optionally, select a transform in your template pipeline to pass data directly to a transform following the ETL Metadata Injection transform in your current pipeline.
+|Field name|If transform to read from is selected, enter the name of the field passed directly from the transform in the template pipeline.
+|Type|If transform to read from is selected, select the type of the field passed directly from the transform in the template pipeline.
+|Length|If transform to read from is selected, enter the length of the field passed directly from the transform in the template pipeline.
+|Precision|If transform to read from is selected, enter the precision of the field passed directly from the transform in the template pipeline.
+|Optional target file (hpl after injection)|For initial pipeline development or debugging, specify an optional file for creating and saving a pipeline of your template after metadata injection occurs. The resulting pipeline will be your template pipeline with the metadata already injected as constant values.
+|Streaming source transform|Select a source transform in your current pipeline to directly pass data to the Streaming target transform in the template pipeline.
+|Streaming target transform|Select the target transform in your template pipeline to receive data directly from the Streaming source transform.
+|Run resulting pipeline|Select to inject metadata and run the template pipeline. If this option is not selected, metadata injection occurs, but the template pipeline does not run.
+|===
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/metastructure.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/metastructure.adoc
new file mode 100644
index 0000000..cc04bf3
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/metastructure.adoc
@@ -0,0 +1,35 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/metastructure/src/main/doc/metastructure.adoc
+= Metadata structure of stream
+
+== Description
+
+This transform produces as output the metadata of the input fields of this transform.
+Before producing this output the transform reads and discards (or eats) all input rows.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Output row count|Include the number of rows eaten by this transform in the output
+|Field for row count|The name of the field containing the number of rows eaten
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/monetdbbulkloader.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/monetdbbulkloader.adoc
new file mode 100644
index 0000000..9932902
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/monetdbbulkloader.adoc
@@ -0,0 +1,84 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/monetdbbulkloader/src/main/doc/monetdbbulkloader.adoc
+
+= MonetDB Bulk Loader
+
+== Description
+
+The MonetDB Bulk Loader transform can be used to bulk load data into MonetDB; this transform greatly improves the speed of loading data into MonetDB.
+
+== Options
+
+=== General
+
+Transform name : Specify the unique name of the MonetDB Bulk Loader transform in the pipeline.
+Connection : Select your MonetDB database connection
+
+=== General Settings tab
+
+This tab contains the destination settings, buffer size and location for the logfile.
+
+|===
+|Field|Description
+
+|Target Schema
+|Specify the database schema that has to be used.
+
+|Target Table
+|Specify the database table, use the Browse button next to this field to use a menu to select the table and schema
+
+|Buffer size (rows)
+|Specify how many rows will be kept in memory before transferring to MonetDB
+
+|Log file
+|Specify the location for the Bulk command logs returned from MonetDB
+
+|Truncate table
+|Remove all data from the destination table before loading the data.
+
+|Fully quote all SQL statements
+|Forces quotes around all objects when executing
+
+|===
+
+=== MonetDB Settings tab
+
+This tab contains information about the temporary files that are generated to load the data.
+
+|===
+|Field|Description
+
+|Field separator
+|This is the separator that will be used in the Bulk copy command, it is not allowed to have this field in the input data.
+
+|Field enclosure
+|The enclosure character used around values.
+
+|Null values represented
+|Null values will be converted to this string, this allows to differentiate empty strings and null values.
+
+|Encoding
+|File encoding used when generating the files for the copy statement.
+
+
+|===
+
+=== Output Fields tab
+This tab contains the source to target mapping.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mongodbinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mongodbinput.adoc
new file mode 100644
index 0000000..ac29e13
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mongodbinput.adoc
@@ -0,0 +1,287 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mongodb/src/main/doc/mondodbinput.adoc
+
+= MongoDB Input
+
+== Description
+
+The MongoDB Input pipeline transform enables you to retrieve http://docs.mongodb.org/manual/reference/glossary/[documents] or http://docs.mongodb.org/manual/reference/glossary/[records] from a collection within MongoDB.
+For additional information about MongoDB, see the MongoDB http://www.mongodb.org/[documentation].
+
+== Options
+
+
+=== General
+
+Transform name : Specify the unique name of the MongoDB Input transform in the pipeline.
+Preview button:  Display the rows generated by this transform. Enter the maximum number of records that you want to preview, then click OK. The preview data appears in the Examine preview data window.
+
+=== Configure Connection tab
+
+image::mongodb-input-screenshot-connection-tab.png[]
+
+|===
+|Field |Description
+
+|Host name(s) or IP address(es)
+|Specify the network name or address of the MongoDB instance or instances. You can also specify a different port number for each host name by separating the host name and port number with a colon. You can input multiple host names or IP addresses, separated by a comma.
+
+|Port
+|Specify the port number of the MongoDB instance or instances. Use this to specify a default port if no ports are given as part of the host name(s) or IP address(es) field. The default value is 27017.
+
+|Enable SSL connection
+|Specify to connect to a MongoDB Server that is configured with SSL.
+
+|Use all replica set members/mongos
+|Select to use all replica sets when multiple hosts are specified in the Host name(s) or IP address(es) field. If a replica set contains more than one host, the Java driver discovers all hosts automatically. The driver connects to the next replica set in the list if the selected set is unavailable.
+
+|Authentication database
+|Specify the authentication database.
+
+|Authenticate Mechanism
+|Select the method used to verify the identity of users. The values are SCRAM-SHA-1, MONGODB-CR and PLAIN.
+
+|Username
+|Specify the username required to access the database. When using Kerberos authentication, enter the Kerberos principal.
+
+|Password
+|Specify the password associated with the username. If you are using Kerberos authentication, you do not need to enter the password.
+
+|Authenticate using Kerberos
+|Select to specify authentication using Kerberos. When selected, enter the Kerberos principal as the Username.
+
+|Connection timeout
+|Specify (in milliseconds) how long to wait for a connection to a database before terminating the connection attempt. Leave blank to never terminate the connection.
+
+|Socket timeout
+|Specify (in milliseconds) how long to wait for a write operation before terminating the operation. Leave blank to never terminate the operation.
+
+|===
+
+=== Input options tab
+
+image::mongodb-input-screenshot-input-tab.png[]
+
+The Input options tab enables you to specify which database and collection you want to retrieve information from. You can also indicate the read preferences and tag sets in this tab.
+
+Enter the following information in the Input options fields:
+
+
+|===
+|Option|Definition
+
+|Database
+|Name of the database to retrieve data from. Click Get DBs to populate the drop-down menu with a list of databases on the server.
+
+|Collection
+|Name of the collection to retrieve data from. Click Get collections to populate the drop-down menu with a list of collections within the database.
+
+|Read preference
+|Specify which node to read first: Primary, Primary preferred, Secondary, Secondary preferred, or Nearest.
+
+|===
+
+==== Tag set specification table
+
+Tags allow you to customize write concerns and read preferences for a replica set. The Tag set specification table allows you to specify criteria for selecting replica set members. See Tag Sets for more information.
+
+Enter the following information in the Tag Set fields:
+
+
+|===
+|Field|Description
+
+|#
+|Indicates the number of the tag set.
+
+|Tag set
+|Displays the tag set criteria. You can join, delete, copy, and paste tag sets, then click Test tag set to see which replica set members match your Tag set specification criteria.
+
+|Get Tags
+|Click Get tags to retrieve a list of tag sets in the source database. Sets are listed in order of execution.
+
+|Join tags
+|Click Join tags to append selected tag sets so that nodes matching the criteria are queried or written to simultaneously. If you select individual tag sets, then click Join tags, the tag sets are combined to create one tag set. Note that this change only occurs in the MongoDB Input window, not on the database.
+
+|Test tag set
+|Click Test tag set to display set members that match the tags indicated in the tag set specification. The ID, host name, priority, and tags for each replica set member that matches the tag set specification criteria are displayed.
+
+|===
+
+=== Query tab
+
+image::mongodb-input-screenshot-query-tab.png[]
+
+The Query tab enables you to refine read requests. This tab operates in two different query modes:
+
+* *Query expression* mode (default)
+* *Aggregation pipeline specification* mode.
+
+The *Query is aggregation pipeline* option toggles between these two modes.
+The Query expression uses MongoDB’s JSON-like query language with query operators to perform https://docs.mongodb.com/manual/reference/operator/query/[query operations].
+The *Aggregation pipeline specification* field uses MongoDB’s http://docs.mongodb.org/manual/applications/aggregation/[aggregation framework] to transform and combine documents in a collection.
+An aggregation pipeline connects several https://docs.mongodb.com/manual/core/aggregation-pipeline/#pipeline-expressions[pipeline expressions] together, with the output of the previous expression becoming the input for the next.
+
+Enter the following information in the Query fields:
+
+
+|===
+|Fields/Option |Definition
+
+|Query expression (JSON)
+|Enter a query expression in this field to limit the output.
+
+|Aggregation pipeline specification (JSON)
+|Select the *Query is aggregation pipeline* option to display the *Aggregation pipeline specification (JSON)* field. Then enter a pipeline expression to perform aggregations or selections. The method name, including the collection name of the database you selected in the Input Options tab, appears after the label for this field.
+
+|Query is aggregation pipeline
+|Select this option to use the aggregation pipeline framework.
+
+|Execute for each row
+|Select this option to perform the query on each row of data.
+
+|Fields expression (JSON)
+|Enter an argument to control the projection (fields to return) from a query. If empty, all fields are returned. This field is only available for query expressions.
+
+|===
+
+=== Fields tab
+
+image::mongodb-input-screenshot-fields-tab.png[]
+
+Use the Fields tab to define properties for exported fields.
+The Fields tab operates in two different modes:
+
+1. including all fields in a single JSON field
+2. including selected fields in the output.
+
+If you store the output in a single JSON field, you can parse this JSON using the JSON Input transform, or by using a User Defined Java Class transform.
+
+*Note:* All fields in the Fields tab except the Name of JSON output field are inactive when the Output single JSON field is selected.
+When the Output single JSON field is not selected, the Name of JSON output field is inactive.
+
+General options:
+
+* *The Get fields button*:  Click it to generate a sample set of documents. You can edit the list of field names, paths, and data type for each field in the sample.
+* *Output single JSON field*: Specify that the query results in a single JSON field with the String data type (default).
+* *Name of JSON output field*: Specify the field name of containing the JSON output from the server.
+
+
+Enter the following information in the table if you want to output distinct fields:
+
+
+|===
+|Option| Definition
+
+|#
+|The order of this entry in the list.
+
+|Name
+|The name of the field based on the value in the Path field. The name that appears here maps the name of the field as it appears in the Hop pipeline with the field that appears in the MongoDB database. You can edit the name.
+
+|Path
+|Indicates the JSON path of the field in MongoDB. If the path shown is an array, you can specify a specific element of the array by passing it the key value in the bracketed part of the array. For example, $.emails[0] indicates that you want the result to display the first value in the array.
+To display all array values, use the asterisk as the key, like this $.email[*]. If the array contains records, and not just strings, you can specify that you want to display the record like this: $.emails[].sender.
+
+|Type
+|Indicates the data type.
+
+|Indexed values
+|Specify a comma-separated list of legal values for String fields. When you specify values in this field, the Hop indexed data type is applied to the data. If no values are specified, the String data type is applied. Usually, you will only need to modify this field if you are using Weka metadata for nominal fields.
+
+|Sample: array min: max index
+|Indicates minimum and maximum values for the index in the sampled documents.
+
+|Sample: #occur/#docs
+|Indicates how often the field occurs and the number of documents processed.
+
+|Sample: disparate types
+|Indicates if different data types populate the same field in the sampled documents. When several documents are sampled and the same field contains different data types, the Sample: disparate types field is populated with a Y and the Type field displays the String data type. The Hop type for the field is set to the String data type, for different output value types.
+
+|===
+
+== Examples
+
+The following sections contain examples of query expressions and aggregate pipelines.
+
+=== Query expression
+
+MongoDB allows you to select and filter documents in a collection using specific fields and values.
+The http://docs.mongodb.org/manual/reference/mongodb-extended-json/[MongoDB Extended JSON] documentation details how to use queries. Hop supports only the features discussed on this page.
+
+The following table displays some examples of the syntax and structure of the queries you can use to request data from MongoDB:
+
+
+|===
+|Query expression |Description
+
+|```{ name : "MongoDB" }```
+|Queries all values where the name field has a value equal to MongoDB.
+
+|```{ name : { '$regex' : "m.*", '$options' : "i" } }```
+|Uses a regular expression to find name fields starting with m, case insensitive.
+
+|```{ name : { '$gt' : "M" } }```
+|Searches all strings greater than M.
+
+|```{ name : { '$lte' : "T" } }```
+|Searches all strings less than or equal to T.
+
+|```{ name : { '$in' : [ "MongoDB", "MySQL" ] } }```
+|Finds all names that are either MongoDB or MySQL (Reference).
+
+|```{ name : { '$nin' : [ "MongoDB", "MySQL" ] } }```
+|Finds all names that are not either MongoDB or MySQL, or where the field is not set .
+
+|```{ created_at : { $gte : { $date : "2014-12-31T00:00:00.000Z" } } }```
+|Finds all created_at documents that are greater than or equal to the specified UTC date.
+
+|```{ $where : "this.count == 1" }```
+|Uses JavaScript to evaluate a condition.
+
+|```{ $query: {}, $orderby: { age : -1 } }```
+|Returns all documents in the collection named collection sorted by the age field in descending order.
+
+|===
+
+=== Aggregate pipeline
+
+MongoDB allows you to select and filter documents using the http://docs.mongodb.org/manual/tutorial/aggregation-examples/[aggregation] pipeline framework.
+The Aggregation page in the MongoDB documentation provides additional examples of function calls.
+
+The following table displays some examples of the query syntax and structure you can use to request data from MongoDB:
+
+
+|===
+|Query expression |Description
+
+|```{ $match : {state : "FL", city : "ORLANDO" } }, {$sort : {pop : -1 } }```
+|Returns all fields from all documents where the state field has a value of FL and the city field has a value of ORLANDO. The returned documents will be sorted by the pop field in descending order.
+
+|```{ $group : { _id: "$state"} }, { $sort : { _id : 1 } }```
+|Returns one field named _id containing the distinct values for state in ascending order. This is similar to the SQL statement SELECT DISTINCT state AS _id FROM collection ORDER BY state ASC.
+
+|```{ $match : {state : "FL" } }, { $group: {_id: "$city" , pop: { $sum: "$pop" } } }, { $sort: { pop: -1 } }, { $project: {_id : 0, city : "$_id" } }```
+|Returns all documents where the state field has a value of FL, aggregates all values of pop for each city, sorts by population descending, and returns one field named city.
+
+|```{ $unwind : "$result" }```
+|Peels off the elements of an array individually, and returns one document for each element of the array.
+
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mongodboutput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mongodboutput.adoc
new file mode 100644
index 0000000..4812538
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mongodboutput.adoc
@@ -0,0 +1,350 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mongodb/src/main/doc/mondodboutput.adoc
+
+= MongoDB Output
+
+== Description
+
+The MongoDB Output pipeline transform can output data to a MongoDB database http://docs.mongodb.org/manual/reference/glossary/[collection].
+
+For additional information about MongoDB, see the MongoDB http://www.mongodb.org/[documentation].
+
+== Options
+
+=== General
+
+Transform name : Specify the unique name of the MongoDB Output transform in the pipeline.
+
+=== Configure Connection tab
+
+image::mongodb-output-screenshot-connection-tab.png[]
+
+The *Configure connection* tab enables you to specify the database and collection for your output.
+
+Enter the following information in the transform fields:
+
+
+|===
+|Field|Description
+
+|Host name(s) or IP address(s)
+|Specify the network name or address of the MongoDB instance(s). You can also specify a different port number for each host name by separating the host name and port number with a colon. You can input multiple host names or IP addresses, separated by a comma.
+
+|Port
+|Specify the port number of the MongoDB instance or instances. Use this to specify a default port if no ports are given as part of the host name(s) or IP address(es) field. The default value is 27017.
+
+|Enable SSL connection
+|Specify to connect to a MongoDB Server that is configured with SSL.
+
+|Use all replica set members/mongos
+|Select to use all replica sets when multiple hosts are specified in the Host name(s) or IP address(s) field.
+
+If a replica set contains more than one host, the Java driver discovers all hosts automatically. The driver connects to the next replica set in the list if the set you try to connect to is down.
+
+|Authentication database
+|Specify the authentication database.
+
+|Username
+|Specify the username required to access the database. If you want to use Kerberos authentication, enter the Kerberos principal in this field.
+
+|Password
+|Specify the password associated with the username. If you are using Kerberos authentication, you do not need to enter the password.
+
+|Authenticate Mechanism
+|Select the method used to verify the identity of users. The values are SCRAM-SHA-1 and MONGODB-CR.
+
+|Authenticate using Kerberos
+|Select to specify authentication using Kerberos.
+
+|Connection timeout
+|Specify (in milliseconds) how long to wait for a connection to a database before terminating the connection attempt. Leave blank to never terminate the connection.
+
+|Socket timeout
+|Specify (in milliseconds) how long to wait for a write operation before terminating the operation. Leave blank to never terminate the operation.
+
+|===
+
+=== Output options tab
+
+image::mongodb-output-screenshot-output-tab.png[]
+
+The Output options tab provides additional controls for inserting data into a MongoDB collection. If the specified collection does not exist, it is created before a document is inserted.
+
+Enter the following information in the fields on this tab:
+
+
+|===
+|Option |Description
+
+|Database
+|Specify the target database for the output. When a valid hostname and port has been set, you can click Get DBs to retrieve the names of the existing databases on the MongoDB instance.
+
+|Collection
+|Specify the target collection for the output. When a valid hostname and port has been set, you can click Get Collections to retrieve the names of existing collections within a selected database. If the specified collection does not exist, it will be created before data is inserted.
+
+|Batch insert size
+|Specify the batch size for bulk insert operations. The default value is 100 rows.
+
+|Truncate collection
+|Select to delete existing data in the target collection before inserting new data.
+
+|Update
+|Sets the update write method for the specified database and collection.
+
+*The Upsert and Modifier update options are not available unless the Update field is selected.*
+
+|Upsert
+|Select to change the write method from insert to upsert. The upsert method replaces a matched record with an entire new record based on all the incoming fields specified in the Mongo document fields tab. A new record is created if match conditions fail for an update.
+
+|Multi-update
+|Select to update all matching documents for each update or upsert operation.
+
+|Modifier update
+|Select to enable modifiers ($ operators) to be used to modify individual fields within matching documents. All matching documents are updated when the Multi-update option is selected.
+
+To update more than one matching document, select Modifier update and Upsert. Selecting Modifier update, Upsert, and Multi-update applies updates to all matching documents, instead of just the first.
+
+|Write concern (w option)
+|Specify the minimum number of servers that must succeed for a write operation. The values are:
+
+-1 : Disables all acknowledgement of write operation errors
+
+0 (Zero) : Disables basic acknowledgment of write operations, but returns information about socket exceptions and networking errors
+
+1 : Acknowledges write operations on the primary node
+
+>1 : Wait for successful write operations to the specified number of slaves, including the primary.
+
+Click *Get custom write concerns* to retrieve custom write concerns that you have stored in the repository.
+
+|w Timeout
+|Specify time (in milliseconds) to wait for a response to write operations before terminating the operation. Leave blank to never terminate.
+
+|Journaled writes
+|Select to set write operations to wait until the mongod (the primary daemon process for the MongoDB system) acknowledges the write operation and commits the data to the journal.
+
+|Read preference
+|Specify which node to read first:
+
+- ```Primary```
+
+- ```Primary preferred```
+
+- ```Secondary```
+
+- ```Secondary preferred```
+
+- ```Nearest```
+
+The default is ```Primary```.
+The Read preference is available when Modifier update is selected.
+
+|Number of retries for write operations
+|Specify the number of times that a write operation is attempted.
+
+|Delay, in seconds, between retry attempts
+|Specify the number of seconds to wait before the next retry.
+
+|===
+
+=== Mongo document fields tab
+
+image::mongodb-output-screenshot-fields-tab.png[]
+
+Use the Mongo document fields tab to define how field values coming into the transform are written to a Mongo document.
+The Modifier policy column controls when the execution of a modifier operation affects a particular field.
+You can use modifier policies when the data for one Mongo document is split over several incoming Hop rows or
+when it is not possible to execute different modifier operations that affect the same field simultaneously.
+
+There are 2 helper buttons you can use:
+
+* *Get fields* : Populates the Name column of the table with the names of the incoming fields.
+* *Preview document structure* : Opens a dialog showing the structure that will be written to MongoDB in JSON format.
+
+Enter the following information in the fields on this tab:
+
+
+|===
+|Column |Field Description
+
+|Name
+|Names of the incoming fields.
+
+|Mongo document path
+|The hierarchical path to fields in a document in dot notation format.
+
+|Use field name
+|Whether to use the incoming field name as the final entry in the path. The values are Y (use incoming field names) and N (do not use incoming field names). When set to Y, a preceding period (.) is assumed.
+
+|NULL values
+|Specifies whether to insert null values in the database. The values are:
+
+- Insert NULL
+
+- Ignore
+
+|JSON
+|Indicates the incoming value is a JSON document.
+
+|Match field for update
+|Indicates whether to match a field when performing an upsert operation. The first document in the collection that matches all fields tagged as Y in this column is replaced with the new document constructed with incoming values for all the defined field paths. If a matching document does not exist, then a new document is inserted into the collection.
+
+|Modifier operation
+|Specify in-place modifications of existing document fields.
+
+The modifiers are:
+
+- N/A
+
+- ```$set``` : Sets the value of a field.
+
+- ```$inc``` : Sets the value of a field if the field does not exist. If the field exists, increases (or decreases, with a negative value) the value of a field.
+
+- ```$push``` : Sets the value of a field if the field does not exist. If the field exists, appends the value of a field.
+
+- ```$``` : (the positional operator for matching inside of arrays).
+
+
+|Modifier policy
+|Controls when execution of a modifier operation affects a field. The values are:
+
+- ```Insert&Update``` : The operation is executed whether or not a match exists in the collection (default).
+The Insert&Update modifier policy (upsert) allows you to specify fields to match when performing an upsert operation.
+Upsert only replaces the first matching document.
+Modifier upserts can be used to replace certain field values in multiple documents.
+
+- ```Insert``` : The operation is executed on an insert only (when the match conditions fail)
+
+- ```Update``` : The operation is executed when match conditions succeed.
+
+|===
+
+==== Example
+
+Here is an example of how you can define a document structure with an arbitrary hierarchy. Use the following input data and document field definitions to create the example document structure in MongoDB:
+
+===== Input data
+
+[source]
+----
+first, last, address, age
+Bob, Jones ,"13 Bob Street", 34
+Fred, Flintstone, "10 Rock Street",50
+Zaphod, Beeblebrox, "Beetlejuice 1", 356
+Noddy,Puppet,"Noddy Land",5
+----
+
+===== Document field definitions
+
+
+|===
+|Name|Mongo document path|Use field name|NULL values|JSON|Match field for update|Modifier operation|Modifier policy
+
+|first
+|top1
+|Y
+|
+|N
+|N
+|N/A
+|Insert&Update
+
+|last
+|array[0]
+|Y
+|
+|N
+|N
+|N/A
+|Insert&Update
+
+|address
+|array[0]
+|Y
+|
+|N
+|N
+|N/A
+|Insert&Update
+
+|age
+|
+|Y
+|
+|N
+|N
+|N/A
+|Insert&Update
+
+|===
+
+====== Document structure
+
+[source]
+{
+  "top1" : {
+    "first" : "<string val>"
+   },
+  "array" : [ { "last" : "<string val>" , "address" : "<string val>"}],
+  "age" : "<integer val>"
+}
+
+=== Create/drop indexes tab
+
+image::mongodb-output-screenshot-indexes-tab.png[]
+
+Use the Create/drop indexes tab to create and drop indexes on one or more fields.
+Unless unique indexes are being used, MongoDB allows duplicate records to be inserted.
+Indexing is performed after all rows have been processed by the transform.
+
+You can use the *Show indexes button* to display a list of existing indexes.
+
+Enter the following information in the fields in this tab:
+
+
+|===
+|Field|Description
+
+|Index fields
+|Specify a single index (using one field) or a compound index (using multiple fields). Compound indexes are specified by a comma-separated list of paths. Use dot notation to specify the path to a field to use in the index. An optional direction indicator can be specified: 1 for ascending or -1 for descending.
+
+|Index opp
+|Specify whether to create or drop an index.
+
+|Unique
+|Specify whether to index only fields with unique values.
+
+|Sparse
+|Specify whether to index only documents that have the indexed field.
+
+|===
+
+==== Create/drop indexes example
+
+The following options define the creation of a compound index of the "first" and "age" fields in ascending order:
+
+|===
+|Index fields|Index opp|Unique|Sparse
+
+|top1.first,age
+|Create
+|N
+|N
+
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mqttpublisher.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mqttpublisher.adoc
new file mode 100644
index 0000000..6980778
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mqttpublisher.adoc
@@ -0,0 +1,23 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mqtt/src/main/doc/mqttpublisher.adoc
+= MQTT Publisher
+
+
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mqttsubscriber.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mqttsubscriber.adoc
new file mode 100644
index 0000000..078a532
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/mqttsubscriber.adoc
@@ -0,0 +1,23 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/mqtt/src/main/doc/mqttsubscriber.adoc
+= MQTT Subscriber
+
+
+
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/multimerge.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/multimerge.adoc
new file mode 100644
index 0000000..e3ef014
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/multimerge.adoc
@@ -0,0 +1,38 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/multimerge/src/main/doc/multimerge.adoc
+= Multiway Merge Join
+
+== Description
+
+The Multiway Merge Join transform allows you to join data from multiple input streams.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Join Type|Type of join (INNER or FULL OUTER).
+|===
+
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/normaliser.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/normaliser.adoc
new file mode 100644
index 0000000..df9b6af
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/normaliser.adoc
@@ -0,0 +1,45 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/normaliser/src/main/doc/normaliser.adoc
+
+= Row Normaliser
+
+== Description
+
+The Row Normaliser transform converts the columns of an input stream into rows. You can use this transform to normalize repeating groups of columns.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Typefield|The name of the type field (product in the example above).
+|Fields table a|
+A list of the fields you want to normalize; you must set the following properties for each selected field:
+
+* Fieldname: Name of the fields to normalize (Product A to C in the example).
+* Type: Give a string to classify the field (A, B or C in our example).
+* New field: You can give one or more fields where the new value should be transferred to (sales in our example).
+|Get Fields|Click to retrieve a list of all fields coming in on the stream(s).
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/nullif.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/nullif.adoc
new file mode 100644
index 0000000..f458b71
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/nullif.adoc
@@ -0,0 +1,29 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/nullif/src/main/doc/nullif.adoc
+
+= Null If
+
+== Description
+
+If the string representation of a certain field is equal to the specified value, then the value is set to null (empty). You can add all fields from the input stream(s) using Get Fields.
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with ETL Metadata Injection to pass metadata to your pipeline at runtime.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/numberrange.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/numberrange.adoc
new file mode 100644
index 0000000..c76645c
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/numberrange.adoc
@@ -0,0 +1,40 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/numberrange/src/main/doc/numberrange.adoc
+
+= Number range
+
+== Description
+
+Create ranges based on numeric fields.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform; this name has to be unique in a single pipeline.
+|Input field|Designate the field that gets checked for the lower and upper boundaries.
+|Output field|Designate the output field name that gets filled with the value depending of the input field.
+|Default value|Value to return if there are no matches within the ranges specified.
+|Ranges|Designates the upper and lower bound of a range.
+|Lower Bound|Designates the minimum value of a range.
+|Upper Bound|Designates the maximum value of a range.
+|Value|Designates a name for the value.
+|===
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgbulkloader.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgbulkloader.adoc
new file mode 100644
index 0000000..c20448e
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgbulkloader.adoc
@@ -0,0 +1,75 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/pgbulkloader/src/main/doc/pgbulkloader.adoc
+= PostgreSQL Bulk Loader
+
+== Description
+
+The PostgreSQL Bulk Loader transform streams data from Hop to the psql command, which loads the data into the database using "COPY DATA FROM STDIN".
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Connection|Name of the database connection on which the target table resides.
+|Target schema|The name of the Schema for the table to write data to. This is important for data sources that allow for table names with dots '.' in it.
+|Target table|Name of the target table.
+|psql path|Full path to the psql utility.
+|Load action|Insert, Truncate. Insert inserts, truncate first truncates the table.
+|Fields to load a|This table contains a list of fields to load data from, properties include:
+
+  * Table field: Table field to be loaded in the PostgreSQL table;
+  * Stream field: Field to be taken from the incoming rows;
+  * Date mask: Either "Pass through, "Date" or "DateTime", determines how date/timestamps will be loaded in PostgreSQL.
+
+|===
+
+== Metadata Injection Support
+
+All fields of this transform support metadata injection. You can use this transform with Metadata Injection to pass metadata to your pipeline at runtime.
+
+== Set Up Authentication
+
+"psql" doesn't allow you to specify the password.  Here is a part of the connection options: 
+[source,bash]
+----
+ Connection options:
+  -h HOSTNAME     database server host or socket directory (default: "/var/run/postgresql")
+  -p PORT         database server port (default: "5432")
+  -U NAME         database user name
+  -W              prompt for password (should happen automatically)
+----
+
+As you can see there is no way to specify a password for the database.  It will always prompt for a password on the console no matter what.
+
+To overcome this you need to set up trusted authentication on the PostgreSQL server.
+
+To make this happen, change the pg_hba.conf file (on my box this is /etc/postgresql/8.2/main/pg_hba.conf) and add a line like this:
+[source,bash]
+----
+host    all         all         192.168.1.0/24        trust
+----
+This basically means that everyone from the 192.168.1.0 network (mask 255.255.255.0) can log into postgres on all databases with any username.  If you are running Hop on the same server, change it to localhost:
+[source,bash]
+----
+host    all         all         127.0.0.1/32        trust
+----
+This is much safer, of course.  Make sure you don't invite any strangers onto your PostgreSQL database!
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgpdecryptstream.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgpdecryptstream.adoc
new file mode 100644
index 0000000..c4f02ba
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgpdecryptstream.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/pgp/src/main/doc/pgpdecryptstream.adoc
+= PGP decrypt stream
+
+== Description
+
+The PGP decrypt stream transform decrypts PGP encrypted text.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|GPG location|GPG key location.
+|Browse...|Browse the filesystem for a GPG key location.
+|Passphrase|Passphrase of the key.
+|Read passphrase from field|Whether or not to read the passphrase from a field.
+|Passphrase fieldname|The passphrase field.
+|Data fieldname|The field to decrypt.
+|Result fieldname|The decrypted result field.
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgpencryptstream.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgpencryptstream.adoc
new file mode 100644
index 0000000..b87bfd2
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pgpencryptstream.adoc
@@ -0,0 +1,39 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/pgp/src/main/doc/pgpencryptstream.adoc
+= PGP encrypt stream
+
+== Description
+
+The PGP encrypt stream transform encrypts text using PGP.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|GPG location|GPG key location.
+|Browse...|Browse the filesystem for a GPG key location.
+|Key name|The name of the PGP key used for encryption.
+|Read keyname from field|Whether or not to read the keyname from a field.
+|Keyname fieldname|The key name field.
+|Data fieldname|The field to encrypt.
+|Result fieldname|The encrypted result field.
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pipelineexcecutor.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pipelineexcecutor.adoc
new file mode 100644
index 0000000..62ddece
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/pipelineexcecutor.adoc
@@ -0,0 +1,87 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/pipelineexecutor/src/main/doc/pipelineexecutor.adoc
+= Pipeline Executor
+
+== Description
+
+The Pipeline Executor transform allows you to execute a Hop pipeline. It is similar to the Workflow Executor transform, but works with pipelines.
+
+Depending on your data transformation needs, the Pipeline Executor transform can be set up to function in any of the following ways:
+
+- By default, the specified pipeline will be executed once for each input row. You can use the input row to set parameters and variables. The executor transform then passes this row to the pipeline in the form of a result row.
+- You can also pass a group of records based on the value in a field, so that when the field value changes dynamically, the specified pipeline is executed. In these cases, the first row in the group of rows is used to set parameters or variables in the pipeline.
+- You can launch multiple copies of this transform to assist in parallel pipeline processing.
+
+== Options
+
+=== General
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Pipeline a|Use this section to specify the pipeline to execute.  You have the following options to specify the pipeline:
+
+- Use a file for the pipeline: when this option is enabled, you can enter the .hpl file that is to be used as pipeline. The filename may contain variables (for example, you can use the built-in Internal.Pipeline.Filename.Directory variable to construct a filename relative to the current pipeline), or you can use the "Browse" button to select a file using a file browser.
+|===
+
+=== Parameter Tab
+
+In this tab you can specify which field to use to set a certain parameter or variable value. If multiple rows are passed to the pipeline, the first row is taken to set the parameters or variables.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Variable / Parameter name|The Parameters tab allows you to define or pass Hop variables down to the pipeline.
+|Field to use|Specify which field to use to set a certain parameter or variable value. If you specify an input field to use, the static input value is not used.
+|Static input value|Instead of a field to use you can specify a static value here.
+|===
+
+If you enable the "Inherit all variables from the pipeline" option, all the variables defined in the parent pipeline are passed to the pipeline.
+
+There is a button in the lower right corner of the tab that will insert all the defined parameters of the specified pipeline. For your information, the description of each parameter is inserted into the static input value field.
+
+=== Row Grouping Tab
+
+On this tab you can specify the amount of input rows that are passed to the pipeline in the form of result rows. You can use the result rows in a Get rows from result transform in a pipeline.
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|The number of rows to send to the pipeline|after every X rows the pipeline will be executed and these X rows will be passed to the pipeline
+|Field to group rows on|Rows will be accumulated in a group as long as the field value stays the same. If the value
+changes the pipeline will be executed and the accumulated rows will be passed to the pipeline.
+|The time to wait collecting rows before execution|This is the time in milliseconds the transform will spend accumulating rows prior to the execution of the pipeline.
+|===
+
+
+=== Execution Results Tab
+
+You can specify result fields and to which transform to send them.  If you don't need a certain result simply leave a blank input field.
+
+=== Result Rows Tab
+
+In the "Result rows" tab you can specify the layout of the expected result rows of this pipeline and to which transform to send them after execution.
+
+Please note that this transform will verify that the data type of the result row fields are identical to what is specified.  If there is a difference an error will be thrown.
+
+=== Result Files Tab
+
+Here you can specify where to send the result files from the pipeline execution.
\ No newline at end of file
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/processfiles.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/processfiles.adoc
new file mode 100644
index 0000000..cfca647
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/processfiles.adoc
@@ -0,0 +1,41 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
+"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+KIND, either express or implied.  See the License for the
+specific language governing permissions and limitations
+under the License.
+////
+:documentationPath: /pipeline/transforms/
+:language: en_US
+:page-alternativeEditUrl: https://github.com/apache/incubator-hop/edit/master/pipeline/transforms/processfiles/src/main/doc/processfiles.adoc
+= Process files
+
+== Description
+
+This transform copies, moves or deletes files by giving the source and target file names in the data stream.
+
+== Options
+
+[width="90%", options="header"]
+|===
+|Option|Description
+|Transform name|Name of the transform.
+|Operation|This defines the operation: Copy, Move or Delete
+|Create target parent folder|When this option is checked, the target parent folder is created.
+Otherwise it throws an exception when the target parent folder is not existing.
+|Overwrite target file|When this option is checked and the target file exists, it will be overwritten.
+Otherwise it will not be overwritten and silently ignored.
+|Add target filename to result|When this option is checked, add the copied, moved or deleted files to the list of result files.
+|Set simulation mode|For testing purposes: No action will be taken at the end and no file will be copied, moved or deleted.
+|Source filename field|This defines the field that contains the complete path to the source file for copy or move, or the file to be deleted.
+|Target filename field|This defines the field that contains the complete path to the target file. This field must not be given in the Delete mode.
+|===
diff --git a/hop-user-manual/modules/ROOT/pages/pipeline/transforms/propertyinput.adoc b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/propertyinput.adoc
new file mode 100644
index 0000000..b10f8a0
--- /dev/null
+++ b/hop-user-manual/modules/ROOT/pages/pipeline/transforms/propertyinput.adoc
@@ -0,0 +1,76 @@
+////
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements.  See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership.  The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License.  You may obtain a copy of the License at
+  http://www.apache.org/licenses/LICENSE-2.0
+Unless required by applicable law or agreed to in writing,
+software distributed under the License is distributed on an
... 7989 lines suppressed ...