Posted to commits@hop.apache.org by mc...@apache.org on 2021/06/20 21:56:52 UTC

[incubator-hop] branch master updated: HOP-2975: add MDI support and cleanup meta

This is an automated email from the ASF dual-hosted git repository.

mcasters pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/incubator-hop.git


The following commit(s) were added to refs/heads/master by this push:
     new 909d93f  HOP-2975: add MDI support and cleanup meta
     new 57940e6  Merge pull request #887 from hansva/master
909d93f is described below

commit 909d93f442be01e9491c96602d391e67dbf147d1
Author: Hans Van Akelyen <ha...@gmail.com>
AuthorDate: Sun Jun 20 18:37:27 2021 +0200

    HOP-2975: add MDI support and cleanup meta
---
 .../mdi/0006-row-denormaliser-child.hpl            | 175 +++++++++
 .../mdi/0006-row-denormaliser-parent.hpl           | 349 ++++++++++++++++++
 .../mdi/datasets/golden-row-denormaliser.csv       |   3 +
 .../mdi/main-0006-row-denormaliser.hwf             |  79 ++++
 .../metadata/dataset/golden-row-denormaliser.json  |  56 +++
 .../unit-test/0006-filter-parent UNIT.json         |  28 --
 .../0006-row-denormaliser-parent UNIT.json         |  48 +++
 .../transforms/denormaliser/Denormaliser.java      | 356 +++++++++---------
 .../transforms/denormaliser/DenormaliserData.java  |   1 -
 .../denormaliser/DenormaliserDialog.java           | 109 +++---
 .../denormaliser/DenormaliserGroupField.java       |  61 +++
 .../transforms/denormaliser/DenormaliserMeta.java  | 221 +++--------
 .../denormaliser/DenormaliserTargetField.java      | 409 ++++++++++++---------
 .../denormaliser/DenormaliserAggregationsTest.java | 142 +++----
 ...DenormaliserTest.java => DenormaliserTest.java} |  64 ++--
 .../denormaliser/DenormalizerMetaTest.java         | 137 -------
 16 files changed, 1397 insertions(+), 841 deletions(-)
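
For context on the "cleanup meta" part of this change: the Java hunks below replace the old array-based accessors (getDenormaliserTargetField()[], getGroupField()[]) with list-based ones (getDenormaliserTargetFields(), getGroupFields()), which is what lets the dialog and metadata-injection code simply clear() and add() entries. The following is a small, self-contained sketch of that array-to-list pattern; the class and method names are simplified stand-ins for illustration only, not the actual Hop API.

    import java.util.ArrayList;
    import java.util.List;

    /** Simplified stand-in illustrating the meta cleanup pattern (not the Hop API). */
    class TargetField {
      private final String fieldName;
      private final String keyValue;

      TargetField(String fieldName, String keyValue) {
        this.fieldName = fieldName;
        this.keyValue = keyValue;
      }

      String getFieldName() { return fieldName; }
      String getKeyValue() { return keyValue; }
    }

    class ExampleMeta {
      // Old style: a fixed-size array that callers had to re-allocate up front.
      // private TargetField[] targetFields;

      // New style: a growable list; callers clear() and add() as needed.
      private final List<TargetField> targetFields = new ArrayList<>();

      List<TargetField> getTargetFields() { return targetFields; }
    }

    public class MetaCleanupSketch {
      public static void main(String[] args) {
        ExampleMeta meta = new ExampleMeta();
        meta.getTargetFields().clear();                       // replaces an up-front allocate(n)
        meta.getTargetFields().add(new TargetField("colorValue", "white"));
        meta.getTargetFields().add(new TargetField("colorValue", "black"));

        // Consumers iterate the list instead of indexing into an array.
        for (TargetField field : meta.getTargetFields()) {
          System.out.println(field.getFieldName() + " <- " + field.getKeyValue());
        }
      }
    }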

diff --git a/integration-tests/mdi/0006-row-denormaliser-child.hpl b/integration-tests/mdi/0006-row-denormaliser-child.hpl
new file mode 100644
index 0000000..5e8d0ef
--- /dev/null
+++ b/integration-tests/mdi/0006-row-denormaliser-child.hpl
@@ -0,0 +1,175 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+-->
+<pipeline>
+  <info>
+    <name>0006-row-denormaliser-child</name>
+    <name_sync_with_filename>Y</name_sync_with_filename>
+    <description/>
+    <extended_description/>
+    <pipeline_version/>
+    <pipeline_type>Normal</pipeline_type>
+    <parameters>
+    </parameters>
+    <capture_transform_performance>N</capture_transform_performance>
+    <transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
+    <transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
+    <created_user>-</created_user>
+    <created_date>2021/05/31 11:45:59.943</created_date>
+    <modified_user>-</modified_user>
+    <modified_date>2021/05/31 11:45:59.943</modified_date>
+    <key_for_session_key>H4sIAAAAAAAAAAMAAAAAAAAAAAA=</key_for_session_key>
+    <is_key_private>N</is_key_private>
+  </info>
+  <notepads>
+  </notepads>
+  <order>
+    <hop>
+      <from>Input data</from>
+      <to>Row denormaliser: colors</to>
+      <enabled>Y</enabled>
+    </hop>
+    <hop>
+      <from>Row denormaliser: colors</from>
+      <to>Verify</to>
+      <enabled>Y</enabled>
+    </hop>
+  </order>
+  <transform>
+    <name>Input data</name>
+    <type>DataGrid</type>
+    <description/>
+    <distribute>Y</distribute>
+    <custom_distribution/>
+    <copies>1</copies>
+    <partitioning>
+      <method>none</method>
+      <schema_name/>
+    </partitioning>
+    <fields>
+      <field>
+        <name>id</name>
+        <type>Integer</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+      <field>
+        <name>colorCode</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+      <field>
+        <name>colorValue</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+    </fields>
+    <data>
+      <line>
+        <item>1</item>
+        <item>white</item>
+        <item>0xFFFFFF</item>
+      </line>
+      <line>
+        <item>1</item>
+        <item>black</item>
+        <item>0x000000</item>
+      </line>
+      <line>
+        <item>2</item>
+        <item>red</item>
+        <item>0xFF0000</item>
+      </line>
+      <line>
+        <item>2</item>
+        <item>green</item>
+        <item>0x00FF00</item>
+      </line>
+      <line>
+        <item>2</item>
+        <item>blue</item>
+        <item>0X0000FF</item>
+      </line>
+    </data>
+    <attributes/>
+    <GUI>
+      <xloc>144</xloc>
+      <yloc>128</yloc>
+    </GUI>
+  </transform>
+  <transform>
+    <name>Row denormaliser: colors</name>
+    <type>Denormaliser</type>
+    <description/>
+    <distribute>Y</distribute>
+    <custom_distribution/>
+    <copies>1</copies>
+    <partitioning>
+      <method>none</method>
+      <schema_name/>
+    </partitioning>
+    <fields>
+</fields>
+    <group>
+</group>
+    <key_field>colorValue</key_field>
+    <attributes/>
+    <GUI>
+      <xloc>304</xloc>
+      <yloc>128</yloc>
+    </GUI>
+  </transform>
+  <transform>
+    <name>Verify</name>
+    <type>Dummy</type>
+    <description/>
+    <distribute>Y</distribute>
+    <custom_distribution/>
+    <copies>1</copies>
+    <partitioning>
+      <method>none</method>
+      <schema_name/>
+    </partitioning>
+    <attributes/>
+    <GUI>
+      <xloc>480</xloc>
+      <yloc>128</yloc>
+    </GUI>
+  </transform>
+  <transform_error_handling>
+  </transform_error_handling>
+  <attributes/>
+</pipeline>
diff --git a/integration-tests/mdi/0006-row-denormaliser-parent.hpl b/integration-tests/mdi/0006-row-denormaliser-parent.hpl
new file mode 100644
index 0000000..a49836f
--- /dev/null
+++ b/integration-tests/mdi/0006-row-denormaliser-parent.hpl
@@ -0,0 +1,349 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+-->
+<pipeline>
+  <info>
+    <name>0006-row-denormaliser-parent</name>
+    <name_sync_with_filename>Y</name_sync_with_filename>
+    <description/>
+    <extended_description/>
+    <pipeline_version/>
+    <pipeline_type>Normal</pipeline_type>
+    <parameters>
+    </parameters>
+    <capture_transform_performance>N</capture_transform_performance>
+    <transform_performance_capturing_delay>1000</transform_performance_capturing_delay>
+    <transform_performance_capturing_size_limit>100</transform_performance_capturing_size_limit>
+    <created_user>-</created_user>
+    <created_date>2021/05/11 12:23:03.890</created_date>
+    <modified_user>-</modified_user>
+    <modified_date>2021/05/11 12:23:03.890</modified_date>
+    <key_for_session_key>H4sIAAAAAAAAAAMAAAAAAAAAAAA=</key_for_session_key>
+    <is_key_private>N</is_key_private>
+  </info>
+  <notepads>
+  </notepads>
+  <order>
+    <hop>
+      <from>0006-row-denormaliser-child.hpl</from>
+      <to>Verify</to>
+      <enabled>Y</enabled>
+    </hop>
+    <hop>
+      <from>target fields</from>
+      <to>0006-row-denormaliser-child.hpl</to>
+      <enabled>Y</enabled>
+    </hop>
+    <hop>
+      <from>group/key field</from>
+      <to>0006-row-denormaliser-child.hpl</to>
+      <enabled>Y</enabled>
+    </hop>
+  </order>
+  <transform>
+    <name>0006-row-denormaliser-child.hpl</name>
+    <type>MetaInject</type>
+    <description/>
+    <distribute>Y</distribute>
+    <custom_distribution/>
+    <copies>1</copies>
+    <partitioning>
+      <method>none</method>
+      <schema_name/>
+    </partitioning>
+    <filename>${PROJECT_HOME}/0006-row-denormaliser-child.hpl</filename>
+    <source_transform>Verify</source_transform>
+    <source_output_fields>
+      <source_output_field>
+        <source_output_field_name>id</source_output_field_name>
+        <source_output_field_type>Integer</source_output_field_type>
+        <source_output_field_length>-1</source_output_field_length>
+        <source_output_field_precision>-1</source_output_field_precision>
+      </source_output_field>
+      <source_output_field>
+        <source_output_field_name>colorWhite</source_output_field_name>
+        <source_output_field_type>String</source_output_field_type>
+        <source_output_field_length>-1</source_output_field_length>
+        <source_output_field_precision>-1</source_output_field_precision>
+      </source_output_field>
+      <source_output_field>
+        <source_output_field_name>colorBlack</source_output_field_name>
+        <source_output_field_type>String</source_output_field_type>
+        <source_output_field_length>-1</source_output_field_length>
+        <source_output_field_precision>-1</source_output_field_precision>
+      </source_output_field>
+      <source_output_field>
+        <source_output_field_name>colorRed</source_output_field_name>
+        <source_output_field_type>String</source_output_field_type>
+        <source_output_field_length>-1</source_output_field_length>
+        <source_output_field_precision>-1</source_output_field_precision>
+      </source_output_field>
+      <source_output_field>
+        <source_output_field_name>colorGreen</source_output_field_name>
+        <source_output_field_type>String</source_output_field_type>
+        <source_output_field_length>-1</source_output_field_length>
+        <source_output_field_precision>-1</source_output_field_precision>
+      </source_output_field>
+      <source_output_field>
+        <source_output_field_name>colorBlue</source_output_field_name>
+        <source_output_field_type>String</source_output_field_type>
+        <source_output_field_length>-1</source_output_field_length>
+        <source_output_field_precision>-1</source_output_field_precision>
+      </source_output_field>
+    </source_output_fields>
+    <target_file/>
+    <no_execution>N</no_execution>
+    <stream_source_transform/>
+    <stream_target_transform/>
+    <mappings>
+      <mapping>
+        <target_transform_name>Row denormaliser: colors</target_transform_name>
+        <target_attribute_key>TARGET_AGGREGATION</target_attribute_key>
+        <target_detail>Y</target_detail>
+        <source_transform>target fields</source_transform>
+        <source_field>aggregation type</source_field>
+      </mapping>
+      <mapping>
+        <target_transform_name>Row denormaliser: colors</target_transform_name>
+        <target_attribute_key>key_field</target_attribute_key>
+        <target_detail>N</target_detail>
+        <source_transform>group/key field</source_transform>
+        <source_field>key field</source_field>
+      </mapping>
+      <mapping>
+        <target_transform_name>Row denormaliser: colors</target_transform_name>
+        <target_attribute_key>TARGET_NAME</target_attribute_key>
+        <target_detail>Y</target_detail>
+        <source_transform>target fields</source_transform>
+        <source_field>target field</source_field>
+      </mapping>
+      <mapping>
+        <target_transform_name>Row denormaliser: colors</target_transform_name>
+        <target_attribute_key>name</target_attribute_key>
+        <target_detail>Y</target_detail>
+        <source_transform>group/key field</source_transform>
+        <source_field>group field</source_field>
+      </mapping>
+      <mapping>
+        <target_transform_name>Row denormaliser: colors</target_transform_name>
+        <target_attribute_key>NAME</target_attribute_key>
+        <target_detail>Y</target_detail>
+        <source_transform>target fields</source_transform>
+        <source_field>value field</source_field>
+      </mapping>
+      <mapping>
+        <target_transform_name>Row denormaliser: colors</target_transform_name>
+        <target_attribute_key>TARGET_TYPE</target_attribute_key>
+        <target_detail>Y</target_detail>
+        <source_transform>target fields</source_transform>
+        <source_field>type</source_field>
+      </mapping>
+      <mapping>
+        <target_transform_name>Row denormaliser: colors</target_transform_name>
+        <target_attribute_key>KEY_VALUE</target_attribute_key>
+        <target_detail>N</target_detail>
+        <source_transform>target fields</source_transform>
+        <source_field>key value</source_field>
+      </mapping>
+    </mappings>
+    <attributes/>
+    <GUI>
+      <xloc>272</xloc>
+      <yloc>64</yloc>
+    </GUI>
+  </transform>
+  <transform>
+    <name>Verify</name>
+    <type>Dummy</type>
+    <description/>
+    <distribute>Y</distribute>
+    <custom_distribution/>
+    <copies>1</copies>
+    <partitioning>
+      <method>none</method>
+      <schema_name/>
+    </partitioning>
+    <attributes/>
+    <GUI>
+      <xloc>432</xloc>
+      <yloc>64</yloc>
+    </GUI>
+  </transform>
+  <transform>
+    <name>group/key field</name>
+    <type>DataGrid</type>
+    <description/>
+    <distribute>Y</distribute>
+    <custom_distribution/>
+    <copies>1</copies>
+    <partitioning>
+      <method>none</method>
+      <schema_name/>
+    </partitioning>
+    <fields>
+      <field>
+        <name>key field</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+      <field>
+        <name>group field</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+    </fields>
+    <data>
+      <line>
+        <item>colorCode</item>
+        <item>id</item>
+      </line>
+    </data>
+    <attributes/>
+    <GUI>
+      <xloc>96</xloc>
+      <yloc>160</yloc>
+    </GUI>
+  </transform>
+  <transform>
+    <name>target fields</name>
+    <type>DataGrid</type>
+    <description/>
+    <distribute>Y</distribute>
+    <custom_distribution/>
+    <copies>1</copies>
+    <partitioning>
+      <method>none</method>
+      <schema_name/>
+    </partitioning>
+    <fields>
+      <field>
+        <name>target field</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+      <field>
+        <name>value field</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+      <field>
+        <name>key value</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+      <field>
+        <name>type</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+      <field>
+        <name>aggregation type</name>
+        <type>String</type>
+        <format/>
+        <currency/>
+        <decimal/>
+        <group/>
+        <length>-1</length>
+        <precision>-1</precision>
+        <set_empty_string>N</set_empty_string>
+      </field>
+    </fields>
+    <data>
+      <line>
+        <item>colorWhite</item>
+        <item>colorValue</item>
+        <item>white</item>
+        <item>String</item>
+        <item>TYPE_AGGR_NONE</item>
+      </line>
+      <line>
+        <item>colorBlack</item>
+        <item>colorValue</item>
+        <item>black</item>
+        <item>String</item>
+        <item>TYPE_AGGR_NONE</item>
+      </line>
+      <line>
+        <item>colorRed</item>
+        <item>colorValue</item>
+        <item>red</item>
+        <item>String</item>
+        <item>TYPE_AGGR_NONE</item>
+      </line>
+      <line>
+        <item>colorGreen</item>
+        <item>colorValue</item>
+        <item>green</item>
+        <item>String</item>
+        <item>TYPE_AGGR_NONE</item>
+      </line>
+      <line>
+        <item>colorBlue</item>
+        <item>colorValue</item>
+        <item>blue</item>
+        <item>String</item>
+        <item>TYPE_AGGR_NONE</item>
+      </line>
+    </data>
+    <attributes/>
+    <GUI>
+      <xloc>96</xloc>
+      <yloc>64</yloc>
+    </GUI>
+  </transform>
+  <transform_error_handling>
+  </transform_error_handling>
+  <attributes/>
+</pipeline>
diff --git a/integration-tests/mdi/datasets/golden-row-denormaliser.csv b/integration-tests/mdi/datasets/golden-row-denormaliser.csv
new file mode 100644
index 0000000..2800fdb
--- /dev/null
+++ b/integration-tests/mdi/datasets/golden-row-denormaliser.csv
@@ -0,0 +1,3 @@
+id,colorWhite,colorBlack,colorRed,colorGreen,colorBlue
+1,0xFFFFFF,0x000000,,,
+2,,,0xFF0000,0x00FF00,0X0000FF
diff --git a/integration-tests/mdi/main-0006-row-denormaliser.hwf b/integration-tests/mdi/main-0006-row-denormaliser.hwf
new file mode 100644
index 0000000..958c220
--- /dev/null
+++ b/integration-tests/mdi/main-0006-row-denormaliser.hwf
@@ -0,0 +1,79 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements.  See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License.  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
+-->
+<workflow>
+  <name>main-0006-row-denormaliser</name>
+  <name_sync_with_filename>Y</name_sync_with_filename>
+  <description/>
+  <extended_description/>
+  <workflow_version/>
+  <created_user>-</created_user>
+  <created_date>2021/05/10 10:49:19.324</created_date>
+  <modified_user>-</modified_user>
+  <modified_date>2021/05/10 10:49:19.324</modified_date>
+  <parameters>
+    </parameters>
+  <actions>
+    <action>
+      <name>Start</name>
+      <description/>
+      <type>SPECIAL</type>
+      <attributes/>
+      <repeat>N</repeat>
+      <schedulerType>0</schedulerType>
+      <intervalSeconds>0</intervalSeconds>
+      <intervalMinutes>60</intervalMinutes>
+      <hour>12</hour>
+      <minutes>0</minutes>
+      <weekDay>1</weekDay>
+      <DayOfMonth>1</DayOfMonth>
+      <parallel>N</parallel>
+      <xloc>112</xloc>
+      <yloc>96</yloc>
+      <attributes_hac/>
+    </action>
+    <action>
+      <name>Run Row denormaliser Unit Tests</name>
+      <description/>
+      <type>RunPipelineTests</type>
+      <attributes/>
+      <test_names>
+        <test_name>
+          <name>0006-row-denormaliser-parent UNIT</name>
+        </test_name>
+      </test_names>
+      <parallel>N</parallel>
+      <xloc>320</xloc>
+      <yloc>96</yloc>
+      <attributes_hac/>
+    </action>
+  </actions>
+  <hops>
+    <hop>
+      <from>Start</from>
+      <to>Run Row denormaliser Unit Tests</to>
+      <enabled>Y</enabled>
+      <evaluation>Y</evaluation>
+      <unconditional>Y</unconditional>
+    </hop>
+  </hops>
+  <notepads>
+  </notepads>
+  <attributes/>
+</workflow>
diff --git a/integration-tests/mdi/metadata/dataset/golden-row-denormaliser.json b/integration-tests/mdi/metadata/dataset/golden-row-denormaliser.json
new file mode 100644
index 0000000..9b3dd18
--- /dev/null
+++ b/integration-tests/mdi/metadata/dataset/golden-row-denormaliser.json
@@ -0,0 +1,56 @@
+{
+  "base_filename": "golden-row-denormaliser.csv",
+  "name": "golden-row-denormaliser",
+  "description": "",
+  "dataset_fields": [
+    {
+      "field_comment": "",
+      "field_length": -1,
+      "field_type": 5,
+      "field_precision": 0,
+      "field_format": "####0;-####0",
+      "field_name": "id"
+    },
+    {
+      "field_comment": "",
+      "field_length": -1,
+      "field_type": 2,
+      "field_precision": -1,
+      "field_format": "",
+      "field_name": "colorWhite"
+    },
+    {
+      "field_comment": "",
+      "field_length": -1,
+      "field_type": 2,
+      "field_precision": -1,
+      "field_format": "",
+      "field_name": "colorBlack"
+    },
+    {
+      "field_comment": "",
+      "field_length": -1,
+      "field_type": 2,
+      "field_precision": -1,
+      "field_format": "",
+      "field_name": "colorRed"
+    },
+    {
+      "field_comment": "",
+      "field_length": -1,
+      "field_type": 2,
+      "field_precision": -1,
+      "field_format": "",
+      "field_name": "colorGreen"
+    },
+    {
+      "field_comment": "",
+      "field_length": -1,
+      "field_type": 2,
+      "field_precision": -1,
+      "field_format": "",
+      "field_name": "colorBlue"
+    }
+  ],
+  "folder_name": ""
+}
\ No newline at end of file
diff --git a/integration-tests/mdi/metadata/unit-test/0006-filter-parent UNIT.json b/integration-tests/mdi/metadata/unit-test/0006-filter-parent UNIT.json
deleted file mode 100644
index 1a09c8b..0000000
--- a/integration-tests/mdi/metadata/unit-test/0006-filter-parent UNIT.json	
+++ /dev/null
@@ -1,28 +0,0 @@
-{
-  "variableValues": [],
-  "database_replacements": [],
-  "autoOpening": true,
-  "basePath": "",
-  "golden_data_sets": [
-    {
-      "field_mappings": [
-        {
-          "transform_field": "id",
-          "data_set_field": "id"
-        }
-      ],
-      "field_order": [
-        "id"
-      ],
-      "transform_name": "Verify",
-      "data_set_name": "golden-filter-parent"
-    }
-  ],
-  "input_data_sets": [],
-  "name": "0006-filter-parent UNIT",
-  "description": "",
-  "trans_test_tweaks": [],
-  "persist_filename": "",
-  "pipeline_filename": "./0006-filter-parent.hpl",
-  "test_type": "UNIT_TEST"
-}
\ No newline at end of file
diff --git a/integration-tests/mdi/metadata/unit-test/0006-row-denormaliser-parent UNIT.json b/integration-tests/mdi/metadata/unit-test/0006-row-denormaliser-parent UNIT.json
new file mode 100644
index 0000000..bc9d23e
--- /dev/null
+++ b/integration-tests/mdi/metadata/unit-test/0006-row-denormaliser-parent UNIT.json	
@@ -0,0 +1,48 @@
+{
+  "variableValues": [],
+  "database_replacements": [],
+  "autoOpening": true,
+  "basePath": "",
+  "golden_data_sets": [
+    {
+      "field_mappings": [
+        {
+          "transform_field": "colorBlack",
+          "data_set_field": "colorBlack"
+        },
+        {
+          "transform_field": "colorBlue",
+          "data_set_field": "colorBlue"
+        },
+        {
+          "transform_field": "colorGreen",
+          "data_set_field": "colorGreen"
+        },
+        {
+          "transform_field": "colorRed",
+          "data_set_field": "colorRed"
+        },
+        {
+          "transform_field": "colorWhite",
+          "data_set_field": "colorWhite"
+        },
+        {
+          "transform_field": "id",
+          "data_set_field": "id"
+        }
+      ],
+      "field_order": [
+        "id"
+      ],
+      "transform_name": "Verify",
+      "data_set_name": "golden-row-denormaliser"
+    }
+  ],
+  "input_data_sets": [],
+  "name": "0006-row-denormaliser-parent UNIT",
+  "description": "",
+  "trans_test_tweaks": [],
+  "persist_filename": "",
+  "pipeline_filename": "./0006-row-denormaliser-parent.hpl",
+  "test_type": "UNIT_TEST"
+}
\ No newline at end of file
diff --git a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java
index cd15194..9df1b12 100644
--- a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java
+++ b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java
@@ -27,6 +27,7 @@ import org.apache.hop.core.row.RowDataUtil;
 import org.apache.hop.core.row.ValueDataUtil;
 import org.apache.hop.core.row.value.ValueMetaBase;
 import org.apache.hop.core.row.value.ValueMetaDate;
+import org.apache.hop.core.row.value.ValueMetaFactory;
 import org.apache.hop.core.row.value.ValueMetaInteger;
 import org.apache.hop.core.util.Utils;
 import org.apache.hop.i18n.BaseMessages;
@@ -45,13 +46,9 @@ import java.util.List;
 import java.util.Map;
 import java.util.Set;
 
-/**
- * Denormalises data based on key-value pairs
- *
- * @author Matt
- * @since 17-jan-2006
- */
-public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserData> implements ITransform<DenormaliserMeta, DenormaliserData> {
+/** Denormalises data based on key-value pairs */
+public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserData>
+    implements ITransform<DenormaliserMeta, DenormaliserData> {
 
   private static final Class<?> PKG = DenormaliserMeta.class; // For Translator
 
@@ -60,31 +57,36 @@ public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserDa
 
   private Map<String, IValueMeta> conversionMetaCache = new HashMap<>();
 
-  public Denormaliser( TransformMeta transformMeta, DenormaliserMeta meta, DenormaliserData data, int copyNr, PipelineMeta pipelineMeta,
-                       Pipeline pipeline ) {
-    super( transformMeta, meta, data, copyNr, pipelineMeta, pipeline );
+  public Denormaliser(
+      TransformMeta transformMeta,
+      DenormaliserMeta meta,
+      DenormaliserData data,
+      int copyNr,
+      PipelineMeta pipelineMeta,
+      Pipeline pipeline) {
+    super(transformMeta, meta, data, copyNr, pipelineMeta, pipeline);
   }
 
   @Override
   public boolean processRow() throws HopException {
     Object[] r = getRow(); // get row!
 
-    if ( r == null ) {
+    if (r == null) {
       // no more input to be expected...
       handleLastRow();
       setOutputDone();
       return false;
     }
 
-    if ( first ) {
+    if (first) {
       // perform all allocations
-      if ( !processFirstRow() ) {
+      if (!processFirstRow()) {
         // we failed on first row....
         return false;
       }
 
       newGroup(); // Create a new result row (init)
-      deNormalise( data.inputRowMeta, r );
+      deNormalise(data.inputRowMeta, r);
       data.previous = r; // copy the row to previous
 
       // we don't need feedback here
@@ -94,114 +96,120 @@ public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserDa
       return true;
     }
 
-    if ( !sameGroup( data.inputRowMeta, data.previous, r ) ) {
+    if (!sameGroup(data.inputRowMeta, data.previous, r)) {
 
-      Object[] outputRowData = buildResult( data.inputRowMeta, data.previous );
-      putRow( data.outputRowMeta, outputRowData ); // copy row to possible alternate rowset(s).
+      Object[] outputRowData = buildResult(data.inputRowMeta, data.previous);
+      putRow(data.outputRowMeta, outputRowData); // copy row to possible alternate rowset(s).
       newGroup(); // Create a new group aggregate (init)
-      deNormalise( data.inputRowMeta, r );
+      deNormalise(data.inputRowMeta, r);
     } else {
-      deNormalise( data.inputRowMeta, r );
+      deNormalise(data.inputRowMeta, r);
     }
 
     data.previous = r;
 
-    if ( checkFeedback( getLinesRead() ) ) {
-      if ( log.isBasic() ) {
-        logBasic( BaseMessages.getString( PKG, "Denormaliser.Log.LineNumber" ) + getLinesRead() );
-      }
+    if (checkFeedback(getLinesRead()) && log.isBasic()) {
+      logBasic(BaseMessages.getString(PKG, "Denormaliser.Log.LineNumber") + getLinesRead());
     }
 
     return true;
   }
 
   private boolean processFirstRow() throws HopTransformException {
-    String val = getVariable( Const.HOP_AGGREGATION_ALL_NULLS_ARE_ZERO, "N" );
-    this.allNullsAreZero = ValueMetaBase.convertStringToBoolean( val );
-    val = getVariable( Const.HOP_AGGREGATION_MIN_NULL_IS_VALUED, "N" );
-    this.minNullIsValued = ValueMetaBase.convertStringToBoolean( val );
+    String val = getVariable(Const.HOP_AGGREGATION_ALL_NULLS_ARE_ZERO, "N");
+    this.allNullsAreZero = ValueMetaBase.convertStringToBoolean(val);
+    val = getVariable(Const.HOP_AGGREGATION_MIN_NULL_IS_VALUED, "N");
+    this.minNullIsValued = ValueMetaBase.convertStringToBoolean(val);
     data.inputRowMeta = getInputRowMeta();
     data.outputRowMeta = data.inputRowMeta.clone();
-    meta.getFields( data.outputRowMeta, getTransformName(), null, null, this, metadataProvider );
+    meta.getFields(data.outputRowMeta, getTransformName(), null, null, this, metadataProvider);
 
-    data.keyFieldNr = data.inputRowMeta.indexOfValue( meta.getKeyField() );
-    if ( data.keyFieldNr < 0 ) {
-      logError( BaseMessages.getString( PKG, "Denormaliser.Log.KeyFieldNotFound", meta.getKeyField() ) );
-      setErrors( 1 );
+    data.keyFieldNr = data.inputRowMeta.indexOfValue(meta.getKeyField());
+    if (data.keyFieldNr < 0) {
+      logError(
+          BaseMessages.getString(PKG, "Denormaliser.Log.KeyFieldNotFound", meta.getKeyField()));
+      setErrors(1);
       stopAll();
       return false;
     }
 
     Map<Integer, Integer> subjects = new Hashtable<>();
-    data.fieldNameIndex = new int[ meta.getDenormaliserTargetField().length ];
-    for ( int i = 0; i < meta.getDenormaliserTargetField().length; i++ ) {
-      DenormaliserTargetField field = meta.getDenormaliserTargetField()[ i ];
-      int idx = data.inputRowMeta.indexOfValue( field.getFieldName() );
-      if ( idx < 0 ) {
-        logError( BaseMessages.getString( PKG, "Denormaliser.Log.UnpivotFieldNotFound", field.getFieldName() ) );
-        setErrors( 1 );
+    data.fieldNameIndex = new int[meta.getDenormaliserTargetFields().size()];
+    for (int i = 0; i < meta.getDenormaliserTargetFields().size(); i++) {
+      DenormaliserTargetField field = meta.getDenormaliserTargetFields().get(i);
+      int idx = data.inputRowMeta.indexOfValue(field.getFieldName());
+      if (idx < 0) {
+        logError(
+            BaseMessages.getString(
+                PKG, "Denormaliser.Log.UnpivotFieldNotFound", field.getFieldName()));
+        setErrors(1);
         stopAll();
         return false;
       }
-      data.fieldNameIndex[ i ] = idx;
-      subjects.put( Integer.valueOf( idx ), Integer.valueOf( idx ) );
+      data.fieldNameIndex[i] = idx;
+      subjects.put(Integer.valueOf(idx), Integer.valueOf(idx));
 
       // See if by accident, the value fieldname isn't the same as the key fieldname.
       // This is not supported of-course and given the complexity of the transform, you can miss:
-      if ( data.fieldNameIndex[ i ] == data.keyFieldNr ) {
-        logError( BaseMessages.getString( PKG, "Denormaliser.Log.ValueFieldSameAsKeyField", field.getFieldName() ) );
-        setErrors( 1 );
+      if (data.fieldNameIndex[i] == data.keyFieldNr) {
+        logError(
+            BaseMessages.getString(
+                PKG, "Denormaliser.Log.ValueFieldSameAsKeyField", field.getFieldName()));
+        setErrors(1);
         stopAll();
         return false;
       }
 
-      // Fill a hashtable with the key strings and the position(s) of the field(s) in the row to take.
+      // Fill a hashtable with the key strings and the position(s) of the field(s) in the row to
+      // take.
       // Store the indexes in a List so that we can accommodate multiple key/value pairs...
       //
-      String keyValue = resolve( field.getKeyValue() );
-      List<Integer> indexes = data.keyValue.get( keyValue );
-      if ( indexes == null ) {
-        indexes = new ArrayList<>( 2 );
+      String keyValue = resolve(field.getKeyValue());
+      List<Integer> indexes = data.keyValue.get(keyValue);
+      if (indexes == null) {
+        indexes = new ArrayList<>(2);
       }
-      indexes.add( Integer.valueOf( i ) ); // Add the index to the list...
-      data.keyValue.put( keyValue, indexes ); // store the list
+      indexes.add(Integer.valueOf(i)); // Add the index to the list...
+      data.keyValue.put(keyValue, indexes); // store the list
     }
 
     Set<Integer> subjectSet = subjects.keySet();
-    data.fieldNrs = subjectSet.toArray( new Integer[ subjectSet.size() ] );
-
-    data.groupnrs = new int[ meta.getGroupField().length ];
-    for ( int i = 0; i < meta.getGroupField().length; i++ ) {
-      data.groupnrs[ i ] = data.inputRowMeta.indexOfValue( meta.getGroupField()[ i ] );
-      if ( data.groupnrs[ i ] < 0 ) {
-        logError( BaseMessages.getString( PKG, "Denormaliser.Log.GroupingFieldNotFound", meta.getGroupField()[ i ] ) );
-        setErrors( 1 );
+    data.fieldNrs = subjectSet.toArray(new Integer[subjectSet.size()]);
+
+    data.groupnrs = new int[meta.getGroupFields().size()];
+    for (int i = 0; i < meta.getGroupFields().size(); i++) {
+      data.groupnrs[i] = data.inputRowMeta.indexOfValue(meta.getGroupFields().get(i).getName());
+      if (data.groupnrs[i] < 0) {
+        logError(
+            BaseMessages.getString(
+                PKG, "Denormaliser.Log.GroupingFieldNotFound", meta.getGroupFields().get(i)));
+        setErrors(1);
         stopAll();
         return false;
       }
     }
 
     List<Integer> removeList = new ArrayList<>();
-    removeList.add( Integer.valueOf( data.keyFieldNr ) );
-    for ( int i = 0; i < data.fieldNrs.length; i++ ) {
-      removeList.add( data.fieldNrs[ i ] );
+    removeList.add(Integer.valueOf(data.keyFieldNr));
+    for (int i = 0; i < data.fieldNrs.length; i++) {
+      removeList.add(data.fieldNrs[i]);
     }
-    Collections.sort( removeList );
+    Collections.sort(removeList);
 
-    data.removeNrs = new int[ removeList.size() ];
-    for ( int i = 0; i < removeList.size(); i++ ) {
-      data.removeNrs[ i ] = removeList.get( i );
+    data.removeNrs = new int[removeList.size()];
+    for (int i = 0; i < removeList.size(); i++) {
+      data.removeNrs[i] = removeList.get(i);
     }
     return true;
   }
 
   private void handleLastRow() throws HopException {
     // Don't forget the last set of rows...
-    if ( data.previous != null ) {
+    if (data.previous != null) {
       // deNormalise(data.previous); --> That would over-do it.
       //
-      Object[] outputRowData = buildResult( data.inputRowMeta, data.previous );
-      putRow( data.outputRowMeta, outputRowData );
+      Object[] outputRowData = buildResult(data.inputRowMeta, data.previous);
+      putRow(data.outputRowMeta, outputRowData);
     }
   }
 
@@ -213,76 +221,80 @@ public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserDa
    * @return
    * @throws HopValueException
    */
-  Object[] buildResult( IRowMeta rowMeta, Object[] rowData ) throws HopValueException {
+  Object[] buildResult(IRowMeta rowMeta, Object[] rowData) throws HopValueException {
     // Deleting objects: we need to create a new object array
     // It's useless to call RowDataUtil.resizeArray
     //
-    Object[] outputRowData = RowDataUtil.allocateRowData( data.outputRowMeta.size() );
+    Object[] outputRowData = RowDataUtil.allocateRowData(data.outputRowMeta.size());
     int outputIndex = 0;
 
     // Copy the data from the incoming row, but remove the unwanted fields in the same loop...
     //
     int removeIndex = 0;
-    for ( int i = 0; i < rowMeta.size(); i++ ) {
-      if ( removeIndex < data.removeNrs.length && i == data.removeNrs[ removeIndex ] ) {
+    for (int i = 0; i < rowMeta.size(); i++) {
+      if (removeIndex < data.removeNrs.length && i == data.removeNrs[removeIndex]) {
         removeIndex++;
       } else {
-        outputRowData[ outputIndex++ ] = rowData[ i ];
+        outputRowData[outputIndex++] = rowData[i];
       }
     }
 
     // Add the unpivoted fields...
     //
-    for ( int i = 0; i < data.targetResult.length; i++ ) {
-      Object resultValue = data.targetResult[ i ];
-      DenormaliserTargetField field = meta.getDenormaliserTargetField()[ i ];
-      switch ( field.getTargetAggregationType() ) {
-        case DenormaliserTargetField.TYPE_AGGR_AVERAGE:
-          long count = data.counters[ i ];
-          Object sum = data.sum[ i ];
-          if ( count > 0 ) {
-            if ( sum instanceof Long ) {
+    for (int i = 0; i < data.targetResult.length; i++) {
+      Object resultValue = data.targetResult[i];
+      DenormaliserTargetField field = meta.getDenormaliserTargetFields().get(i);
+      switch (field.getTargetAggregationType()) {
+        case TYPE_AGGR_AVERAGE:
+          long count = data.counters[i];
+          Object sum = data.sum[i];
+          if (count > 0) {
+            if (sum instanceof Long) {
               resultValue = (Long) sum / count;
-            } else if ( sum instanceof Double ) {
+            } else if (sum instanceof Double) {
               resultValue = (Double) sum / count;
-            } else if ( sum instanceof BigDecimal ) {
-              resultValue = ( (BigDecimal) sum ).divide( new BigDecimal( count ) );
+            } else if (sum instanceof BigDecimal) {
+              resultValue = ((BigDecimal) sum).divide(new BigDecimal(count));
             } else {
-              resultValue = null; // TODO: perhaps throw an exception here?<
+              resultValue = null;
             }
           }
           break;
-        case DenormaliserTargetField.TYPE_AGGR_COUNT_ALL:
-          if ( resultValue == null ) {
-            resultValue = Long.valueOf( 0 );
+        case TYPE_AGGR_COUNT_ALL:
+          if (resultValue == null) {
+            resultValue = Long.valueOf(0);
           }
-          if ( field.getTargetType() != IValueMeta.TYPE_INTEGER ) {
+          if (!field
+              .getTargetType()
+              .equals(ValueMetaFactory.getValueMetaName(IValueMeta.TYPE_INTEGER))) {
             resultValue =
-              data.outputRowMeta.getValueMeta( outputIndex ).convertData(
-                new ValueMetaInteger( "num_values_aggregation" ), resultValue );
+                data.outputRowMeta
+                    .getValueMeta(outputIndex)
+                    .convertData(new ValueMetaInteger("num_values_aggregation"), resultValue);
           }
           break;
         default:
           break;
       }
-      if ( resultValue == null && allNullsAreZero ) {
-        //seems all rows for min function was nulls...
-        resultValue = getZero( outputIndex );
+      if (resultValue == null && allNullsAreZero) {
+        // seems all rows for min function was nulls...
+        resultValue = getZero(outputIndex);
       }
-      outputRowData[ outputIndex++ ] = resultValue;
+      outputRowData[outputIndex++] = resultValue;
     }
 
     return outputRowData;
   }
 
-  private Object getZero( int field ) throws HopValueException {
-    IValueMeta vm = data.outputRowMeta.getValueMeta( field );
-    return ValueDataUtil.getZeroForValueMetaType( vm );
+  private Object getZero(int field) throws HopValueException {
+    IValueMeta vm = data.outputRowMeta.getValueMeta(field);
+    return ValueDataUtil.getZeroForValueMetaType(vm);
   }
 
   // Is the row r of the same group as previous?
-  private boolean sameGroup( IRowMeta rowMeta, Object[] previous, Object[] rowData ) throws HopValueException {
-    return rowMeta.compare( previous, rowData, data.groupnrs ) == 0;
+  private boolean sameGroup(IRowMeta rowMeta, Object[] previous, Object[] rowData)
+      throws HopValueException {
+    return rowMeta.compare(previous, rowData, data.groupnrs) == 0;
   }
 
   /**
@@ -294,136 +306,135 @@ public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserDa
     // There is no need anymore to take care of the meta-data.
     // That is done once in DenormaliserMeta.getFields()
     //
-    data.targetResult = new Object[ meta.getDenormaliserTargetFields().length ];
+    data.targetResult = new Object[meta.getDenormaliserTargetFields().size()];
 
-    DenormaliserTargetField[] fields = meta.getDenormaliserTargetField();
+    List<DenormaliserTargetField> fields = meta.getDenormaliserTargetFields();
 
-    for ( int i = 0; i < fields.length; i++ ) {
-      data.counters[ i ] = 0L; // set to 0
-      data.sum[ i ] = null;
+    for (int i = 0; i < fields.size(); i++) {
+      data.counters[i] = 0L; // set to 0
+      data.sum[i] = null;
     }
   }
 
   /**
-   * This method de-normalizes a single key-value pair. It looks up the key and determines the value name to store it
-   * in. It converts it to the right type and stores it in the result row.
-   * <p>
-   * Used for junits in DenormaliserAggregationsTest
+   * This method de-normalizes a single key-value pair. It looks up the key and determines the value
+   * name to store it in. It converts it to the right type and stores it in the result row.
    *
-   * @param rowMeta
-   * @param rowData
+   * <p>Used for junits in DenormaliserAggregationsTest
+   *
+   * @param rowMeta Metadata of Row
+   * @param rowData Data of Row
    * @throws HopValueException
    */
-  void deNormalise( IRowMeta rowMeta, Object[] rowData ) throws HopValueException {
-    IValueMeta valueMeta = rowMeta.getValueMeta( data.keyFieldNr );
-    Object valueData = rowData[ data.keyFieldNr ];
+  void deNormalise(IRowMeta rowMeta, Object[] rowData) throws HopValueException {
+    IValueMeta valueMeta = rowMeta.getValueMeta(data.keyFieldNr);
+    Object valueData = rowData[data.keyFieldNr];
 
-    String key = valueMeta.getCompatibleString( valueData );
-    if ( Utils.isEmpty( key ) ) {
+    String key = valueMeta.getCompatibleString(valueData);
+    if (Utils.isEmpty(key)) {
       return;
     }
     // Get all the indexes for the given key value...
     //
-    List<Integer> indexes = data.keyValue.get( key );
-    if ( indexes == null ) { // otherwise we're not interested.
+    List<Integer> indexes = data.keyValue.get(key);
+    if (indexes == null) { // otherwise we're not interested.
       return;
     }
 
-    for ( Integer keyNr : indexes ) {
-      if ( keyNr == null ) {
+    for (Integer keyNr : indexes) {
+      if (keyNr == null) {
         continue;
       }
       // keyNr is the field in DenormaliserTargetField[]
       //
       int idx = keyNr.intValue();
-      DenormaliserTargetField field = meta.getDenormaliserTargetField()[ idx ];
+      DenormaliserTargetField field = meta.getDenormaliserTargetFields().get(idx);
 
       // This is the value we need to de-normalise, convert, aggregate.
       //
-      IValueMeta sourceMeta = rowMeta.getValueMeta( data.fieldNameIndex[ idx ] );
-      Object sourceData = rowData[ data.fieldNameIndex[ idx ] ];
+      IValueMeta sourceMeta = rowMeta.getValueMeta(data.fieldNameIndex[idx]);
+      Object sourceData = rowData[data.fieldNameIndex[idx]];
       Object targetData;
       // What is the target value metadata??
       //
       IValueMeta targetMeta =
-        data.outputRowMeta.getValueMeta( data.inputRowMeta.size() - data.removeNrs.length + idx );
+          data.outputRowMeta.getValueMeta(data.inputRowMeta.size() - data.removeNrs.length + idx);
       // What was the previous target in the result row?
       //
-      Object prevTargetData = data.targetResult[ idx ];
+      Object prevTargetData = data.targetResult[idx];
 
       // clone source meta as it can be used by other transforms ans set conversion meta
       // to convert date to target format
       IValueMeta origSourceMeta = sourceMeta;
-      if ( targetMeta.isDate() ) {
+      if (targetMeta.isDate()) {
         sourceMeta = origSourceMeta.clone();
-        sourceMeta.setConversionMetadata( getConversionMeta( field.getTargetFormat() ) );
+        sourceMeta.setConversionMetadata(getConversionMeta(field.getTargetFormat()));
       }
 
-      switch ( field.getTargetAggregationType() ) {
-        case DenormaliserTargetField.TYPE_AGGR_SUM:
-          targetData = targetMeta.convertData( sourceMeta, sourceData );
-          if ( prevTargetData != null ) {
-            prevTargetData = ValueDataUtil.sum( targetMeta, prevTargetData, targetMeta, targetData );
+      switch (field.getTargetAggregationType()) {
+        case TYPE_AGGR_SUM:
+          targetData = targetMeta.convertData(sourceMeta, sourceData);
+          if (prevTargetData != null) {
+            prevTargetData = ValueDataUtil.sum(targetMeta, prevTargetData, targetMeta, targetData);
           } else {
             prevTargetData = targetData;
           }
           break;
-        case DenormaliserTargetField.TYPE_AGGR_MIN:
-          if ( sourceData == null && !minNullIsValued ) {
+        case TYPE_AGGR_MIN:
+          if (sourceData == null && !minNullIsValued) {
             break;
           }
-          if ( ( prevTargetData == null && !minNullIsValued )
-            || sourceMeta.compare( sourceData, targetMeta, prevTargetData ) < 0 ) {
-            prevTargetData = targetMeta.convertData( sourceMeta, sourceData );
+          if ((prevTargetData == null && !minNullIsValued)
+              || sourceMeta.compare(sourceData, targetMeta, prevTargetData) < 0) {
+            prevTargetData = targetMeta.convertData(sourceMeta, sourceData);
           }
           break;
-        case DenormaliserTargetField.TYPE_AGGR_MAX:
-          if ( sourceMeta.compare( sourceData, targetMeta, prevTargetData ) > 0 ) {
-            prevTargetData = targetMeta.convertData( sourceMeta, sourceData );
+        case TYPE_AGGR_MAX:
+          if (sourceMeta.compare(sourceData, targetMeta, prevTargetData) > 0) {
+            prevTargetData = targetMeta.convertData(sourceMeta, sourceData);
           }
           break;
-        case DenormaliserTargetField.TYPE_AGGR_COUNT_ALL:
-          prevTargetData = ++data.counters[ idx ];
+        case TYPE_AGGR_COUNT_ALL:
+          prevTargetData = ++data.counters[idx];
           break;
-        case DenormaliserTargetField.TYPE_AGGR_AVERAGE:
-          targetData = targetMeta.convertData( sourceMeta, sourceData );
-          if ( !sourceMeta.isNull( sourceData ) ) {
-            prevTargetData = data.counters[ idx ]++;
-            if ( data.sum[ idx ] == null ) {
-              data.sum[ idx ] = targetData;
+        case TYPE_AGGR_AVERAGE:
+          targetData = targetMeta.convertData(sourceMeta, sourceData);
+          if (!sourceMeta.isNull(sourceData)) {
+            prevTargetData = data.counters[idx]++;
+            if (data.sum[idx] == null) {
+              data.sum[idx] = targetData;
             } else {
-              data.sum[ idx ] = ValueDataUtil.plus( targetMeta, data.sum[ idx ], targetMeta, targetData );
+              data.sum[idx] = ValueDataUtil.plus(targetMeta, data.sum[idx], targetMeta, targetData);
             }
-            // data.sum[idx] = (Integer)data.sum[idx] + (Integer)sourceData;
           }
           break;
-        case DenormaliserTargetField.TYPE_AGGR_CONCAT_COMMA:
+        case TYPE_AGGR_CONCAT_COMMA:
           String separator = ",";
 
-          targetData = targetMeta.convertData( sourceMeta, sourceData );
-          if ( prevTargetData != null ) {
+          targetData = targetMeta.convertData(sourceMeta, sourceData);
+          if (prevTargetData != null) {
             prevTargetData = prevTargetData + separator + targetData;
           } else {
             prevTargetData = targetData;
           }
           break;
-        case DenormaliserTargetField.TYPE_AGGR_NONE:
+        case TYPE_AGGR_NONE:
         default:
-          prevTargetData = targetMeta.convertData( sourceMeta, sourceData ); // Overwrite the previous
+          prevTargetData = targetMeta.convertData(sourceMeta, sourceData); // Overwrite the previous
           break;
       }
 
       // Update the result row too
       //
-      data.targetResult[ idx ] = prevTargetData;
+      data.targetResult[idx] = prevTargetData;
     }
   }
 
   @Override
-  public boolean init(){
-    if ( super.init() ) {
-      data.counters = new long[ meta.getDenormaliserTargetField().length ];
-      data.sum = new Object[ meta.getDenormaliserTargetField().length ];
+  public boolean init() {
+    if (super.init()) {
+      data.counters = new long[meta.getDenormaliserTargetFields().size()];
+      data.sum = new Object[meta.getDenormaliserTargetFields().size()];
 
       return true;
     }
@@ -439,17 +450,17 @@ public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserDa
   /**
    * Get the metadata used for conversion to date format See related
    *
-   * @param mask
-   * @return
+   * @param mask get the metadata used for conversion mask
+   * @return return converted meta
    */
-  private IValueMeta getConversionMeta( String mask ) {
+  private IValueMeta getConversionMeta(String mask) {
     IValueMeta meta = null;
-    if ( !Utils.isEmpty( mask ) ) {
-      meta = conversionMetaCache.get( mask );
-      if ( meta == null ) {
+    if (!Utils.isEmpty(mask)) {
+      meta = conversionMetaCache.get(mask);
+      if (meta == null) {
         meta = new ValueMetaDate();
-        meta.setConversionMask( mask );
-        conversionMetaCache.put( mask, meta );
+        meta.setConversionMask(mask);
+        conversionMetaCache.put(mask, meta);
       }
     }
     return meta;
@@ -460,7 +471,7 @@ public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserDa
    *
    * @param allNullsAreZero the allNullsAreZero to set
    */
-  void setAllNullsAreZero( boolean allNullsAreZero ) {
+  void setAllNullsAreZero(boolean allNullsAreZero) {
     this.allNullsAreZero = allNullsAreZero;
   }
 
@@ -469,8 +480,7 @@ public class Denormaliser extends BaseTransform<DenormaliserMeta, DenormaliserDa
    *
    * @param minNullIsValued the minNullIsValued to set
    */
-  void setMinNullIsValued( boolean minNullIsValued ) {
+  void setMinNullIsValued(boolean minNullIsValued) {
     this.minNullIsValued = minNullIsValued;
   }
-
 }
diff --git a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserData.java b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserData.java
index 37cfcc0..d6a8893 100644
--- a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserData.java
+++ b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserData.java
@@ -61,5 +61,4 @@ public class DenormaliserData extends BaseTransformData implements ITransformDat
     previous = null;
     keyValue = new Hashtable<>();
   }
-
 }
diff --git a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserDialog.java b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserDialog.java
index 9b3914f..fa51f9c 100644
--- a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserDialog.java
+++ b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserDialog.java
@@ -30,30 +30,19 @@ import org.apache.hop.pipeline.transform.ITransformDialog;
 import org.apache.hop.ui.core.dialog.BaseDialog;
 import org.apache.hop.ui.core.dialog.ErrorDialog;
 import org.apache.hop.ui.core.dialog.MessageDialogWithToggle;
-import org.apache.hop.ui.core.gui.GuiResource;
 import org.apache.hop.ui.core.widget.ColumnInfo;
 import org.apache.hop.ui.core.widget.TableView;
 import org.apache.hop.ui.pipeline.transform.BaseTransformDialog;
 import org.eclipse.swt.SWT;
 import org.eclipse.swt.custom.CCombo;
 import org.eclipse.swt.events.ModifyListener;
+import org.eclipse.swt.events.MouseAdapter;
 import org.eclipse.swt.events.MouseEvent;
-import org.eclipse.swt.events.MouseListener;
-import org.eclipse.swt.events.SelectionAdapter;
-import org.eclipse.swt.events.SelectionEvent;
-import org.eclipse.swt.events.ShellAdapter;
-import org.eclipse.swt.events.ShellEvent;
 import org.eclipse.swt.graphics.Cursor;
 import org.eclipse.swt.layout.FormAttachment;
 import org.eclipse.swt.layout.FormData;
 import org.eclipse.swt.layout.FormLayout;
-import org.eclipse.swt.widgets.Button;
-import org.eclipse.swt.widgets.Display;
-import org.eclipse.swt.widgets.Label;
-import org.eclipse.swt.widgets.Listener;
-import org.eclipse.swt.widgets.Shell;
-import org.eclipse.swt.widgets.TableItem;
-import org.eclipse.swt.widgets.Text;
+import org.eclipse.swt.widgets.*;
 
 public class DenormaliserDialog extends BaseTransformDialog implements ITransformDialog {
   private static final Class<?> PKG = DenormaliserMeta.class; // For Translator
@@ -134,27 +123,9 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
     fdKeyField.top = new FormAttachment(wTransformName, margin);
     fdKeyField.right = new FormAttachment(100, 0);
     wKeyField.setLayoutData(fdKeyField);
-    //    wKeyField.addFocusListener( new FocusListener() {
-    //      public void focusLost( org.eclipse.swt.events.FocusEvent e ) {
-    //      }
-    //
-    //      public void focusGained( org.eclipse.swt.events.FocusEvent e ) {
-    //        Cursor busy = new Cursor( shell.getDisplay(), SWT.CURSOR_WAIT );
-    //        shell.setCursor( busy );
-    //        getPreviousFieldNames();
-    //        shell.setCursor( null );
-    //        busy.dispose();
-    //      }
-    //    } );
 
     wKeyField.addMouseListener(
-        new MouseListener() {
-          @Override
-          public void mouseDoubleClick(MouseEvent e) {}
-
-          @Override
-          public void mouseDown(MouseEvent e) {}
-
+        new MouseAdapter() {
           @Override
           public void mouseUp(MouseEvent e) {
             Cursor busy = new Cursor(shell.getDisplay(), SWT.CURSOR_WAIT);
@@ -174,7 +145,7 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
     wlGroup.setLayoutData(fdlGroup);
 
     int nrKeyCols = 1;
-    int nrKeyRows = (input.getGroupField() != null ? input.getGroupField().length : 1);
+    int nrKeyRows = (input.getGroupFields() != null ? input.getGroupFields().size() : 1);
 
     ColumnInfo[] ciKey = new ColumnInfo[nrKeyCols];
     ciKey[0] =
@@ -217,8 +188,8 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
     wlTarget.setLayoutData(fdlTarget);
 
     int UpInsRows =
-        (input.getDenormaliserTargetField() != null
-            ? input.getDenormaliserTargetField().length
+        (input.getDenormaliserTargetFields() != null
+            ? input.getDenormaliserTargetFields().size()
             : 1);
 
     ColumnInfo[] ciTarget =
@@ -271,7 +242,7 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
           new ColumnInfo(
               BaseMessages.getString(PKG, "DenormaliserDialog.ColumnInfo.Aggregation"),
               ColumnInfo.COLUMN_TYPE_CCOMBO,
-              DenormaliserTargetField.typeAggrLongDesc,
+              DenormaliserTargetField.DenormaliseAggregation.getDescriptions(),
               false),
         };
 
@@ -332,18 +303,19 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
       wKeyField.setText(input.getKeyField());
     }
 
-    if (input.getGroupField() != null) {
-      for (int i = 0; i < input.getGroupField().length; i++) {
+    if (input.getGroupFields() != null) {
+      for (int i = 0; i < input.getGroupFields().size(); i++) {
         TableItem item = wGroup.table.getItem(i);
-        if (input.getGroupField()[i] != null) {
-          item.setText(1, input.getGroupField()[i]);
+        if (input.getGroupFields().get(i) != null) {
+          DenormaliserGroupField groupfield = input.getGroupFields().get(i);
+          item.setText(1, groupfield.getName());
         }
       }
     }
 
-    if (input.getDenormaliserTargetField() != null) {
-      for (int i = 0; i < input.getDenormaliserTargetField().length; i++) {
-        DenormaliserTargetField field = input.getDenormaliserTargetField()[i];
+    if (input.getDenormaliserTargetFields() != null) {
+      for (int i = 0; i < input.getDenormaliserTargetFields().size(); i++) {
+        DenormaliserTargetField field = input.getDenormaliserTargetFields().get(i);
 
         TableItem item = wTarget.table.getItem(i);
 
@@ -356,8 +328,8 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
         if (field.getKeyValue() != null) {
           item.setText(3, field.getKeyValue());
         }
-        if (field.getTargetTypeDesc() != null) {
-          item.setText(4, field.getTargetTypeDesc());
+        if (field.getTargetType() != null) {
+          item.setText(4, field.getTargetType());
         }
         if (field.getTargetFormat() != null) {
           item.setText(5, field.getTargetFormat());
@@ -380,8 +352,8 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
         if (field.getTargetNullString() != null) {
           item.setText(11, field.getTargetNullString());
         }
-        if (field.getTargetAggregationType() >= 0) {
-          item.setText(12, field.getTargetAggregationTypeDescLong());
+        if (field.getTargetAggregationType().getDefaultResultType() >= 0) {
+          item.setText(12, field.getTargetAggregationType().getDescription());
         }
       }
     }
@@ -409,13 +381,15 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
     int nrFields = wTarget.nrNonEmpty();
 
     input.setKeyField(wKeyField.getText());
-
-    input.allocate(sizegroup, nrFields);
+    input.getGroupFields().clear();
+    input.getDenormaliserTargetFields().clear();
 
     for (int i = 0; i < sizegroup; i++) {
       TableItem item = wGroup.getNonEmpty(i);
       // CHECKSTYLE:Indentation:OFF
-      input.getGroupField()[i] = item.getText(1);
+      DenormaliserGroupField groupfield = new DenormaliserGroupField();
+      groupfield.setName(item.getText(1));
+      input.getGroupFields().add(groupfield);
     }
 
     for (int i = 0; i < nrFields; i++) {
@@ -433,10 +407,11 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
       field.setTargetDecimalSymbol(item.getText(9));
       field.setTargetGroupingSymbol(item.getText(10));
       field.setTargetNullString(item.getText(11));
-      field.setTargetAggregationType(item.getText(12));
+      field.setTargetAggregationType(
+          DenormaliserTargetField.DenormaliseAggregation.getTypeWithDescription(item.getText(12)));
 
       // CHECKSTYLE:Indentation:OFF
-      input.getDenormaliserTargetField()[i] = field;
+      input.getDenormaliserTargetFields().add(field);
     }
 
     transformName = wTransformName.getText();
@@ -493,21 +468,21 @@ public class DenormaliserDialog extends BaseTransformDialog implements ITransfor
             -1,
             -1,
             (tableItem, v) -> {
-              if (Const.indexOfString(v.getName(), groupingFields) < 0) { // Not a grouping field
-                if (!wKeyField.getText().equalsIgnoreCase(v.getName())) { // Not the key field
-                  int nr = tableItem.getParent().indexOf(tableItem) + 1;
-                  tableItem.setText(
-                      1,
-                      BaseMessages.getString(PKG, "DenormaliserDialog.TargetFieldname.Label")
-                          + nr); // the target fieldname
-                  tableItem.setText(2, v.getName());
-                  tableItem.setText(4, v.getTypeDesc());
-                  if (v.getLength() >= 0) {
-                    tableItem.setText(6, "" + v.getLength());
-                  }
-                  if (v.getPrecision() >= 0) {
-                    tableItem.setText(7, "" + v.getPrecision());
-                  }
+              if (Const.indexOfString(v.getName(), groupingFields) < 0
+                  && !wKeyField.getText().equalsIgnoreCase(v.getName())) { // Not a grouping field
+                // Not the key field
+                int nr = tableItem.getParent().indexOf(tableItem) + 1;
+                tableItem.setText(
+                    1,
+                    BaseMessages.getString(PKG, "DenormaliserDialog.TargetFieldname.Label")
+                        + nr); // the target fieldname
+                tableItem.setText(2, v.getName());
+                tableItem.setText(4, v.getTypeDesc());
+                if (v.getLength() >= 0) {
+                  tableItem.setText(6, "" + v.getLength());
+                }
+                if (v.getPrecision() >= 0) {
+                  tableItem.setText(7, "" + v.getPrecision());
                 }
               }
               return true;
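
Side note on the listener cleanup in the dialog above: MouseAdapter ships empty implementations of the MouseListener callbacks, so the dialog only needs to override mouseUp. A self-contained sketch of the same pattern (illustrative only, not part of this commit; the combo widget and busy-cursor handling mirror the dialog code, the actual lookup is left as a placeholder):

import org.eclipse.swt.SWT;
import org.eclipse.swt.custom.CCombo;
import org.eclipse.swt.events.MouseAdapter;
import org.eclipse.swt.events.MouseEvent;
import org.eclipse.swt.graphics.Cursor;
import org.eclipse.swt.widgets.Display;
import org.eclipse.swt.widgets.Shell;

public class MouseAdapterSketch {
  public static void main(String[] args) {
    Display display = new Display();
    Shell shell = new Shell(display);
    CCombo combo = new CCombo(shell, SWT.BORDER);
    combo.setBounds(10, 10, 200, 25);

    // MouseAdapter: only the event we care about is overridden.
    combo.addMouseListener(
        new MouseAdapter() {
          @Override
          public void mouseUp(MouseEvent e) {
            // Show a busy cursor while (for example) previous field names are fetched.
            Cursor busy = new Cursor(shell.getDisplay(), SWT.CURSOR_WAIT);
            shell.setCursor(busy);
            // ... long-running lookup would go here ...
            shell.setCursor(null);
            busy.dispose();
          }
        });

    shell.open();
    while (!shell.isDisposed()) {
      if (!display.readAndDispatch()) {
        display.sleep();
      }
    }
    display.dispose();
  }
}
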
diff --git a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserGroupField.java b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserGroupField.java
new file mode 100644
index 0000000..bbbd4af
--- /dev/null
+++ b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserGroupField.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hop.pipeline.transforms.denormaliser;
+
+import org.apache.hop.metadata.api.HopMetadataProperty;
+
+/**
+ * Utility class that contains the group field name, created for backwards compatibility with
+ * the existing XML layout
+ */
+public class DenormaliserGroupField implements Cloneable {
+
+  /** The value to group on */
+  @HopMetadataProperty(
+      key = "name",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.GroupField")
+  private String name;
+
+  public DenormaliserGroupField() {}
+
+  public DenormaliserGroupField(DenormaliserGroupField g) {
+    this.name = g.name;
+  }
+
+  public DenormaliserGroupField clone() {
+    return new DenormaliserGroupField(this);
+  }
+
+  /**
+   * get the group field name
+   *
+   * @return name of the groupfield
+   */
+  public String getName() {
+    return name;
+  }
+
+  /**
+   * set the group field name
+   *
+   * @param name to set the name of the groupfield
+   */
+  public void setName(String name) {
+    this.name = name;
+  }
+}
diff --git a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserMeta.java b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserMeta.java
index 49a689e..da0c512 100644
--- a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserMeta.java
+++ b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserMeta.java
@@ -18,126 +18,101 @@
 package org.apache.hop.pipeline.transforms.denormaliser;
 
 import org.apache.hop.core.CheckResult;
-import org.apache.hop.core.Const;
 import org.apache.hop.core.ICheckResult;
 import org.apache.hop.core.annotations.Transform;
 import org.apache.hop.core.exception.HopTransformException;
-import org.apache.hop.core.exception.HopXmlException;
 import org.apache.hop.core.row.IRowMeta;
 import org.apache.hop.core.row.IValueMeta;
 import org.apache.hop.core.row.value.ValueMetaFactory;
 import org.apache.hop.core.variables.IVariables;
-import org.apache.hop.core.xml.XmlHandler;
 import org.apache.hop.i18n.BaseMessages;
+import org.apache.hop.metadata.api.HopMetadataProperty;
 import org.apache.hop.metadata.api.IHopMetadataProvider;
 import org.apache.hop.pipeline.Pipeline;
 import org.apache.hop.pipeline.PipelineMeta;
 import org.apache.hop.pipeline.transform.BaseTransformMeta;
 import org.apache.hop.pipeline.transform.ITransformMeta;
 import org.apache.hop.pipeline.transform.TransformMeta;
-import org.w3c.dom.Node;
 
+import java.util.ArrayList;
 import java.util.List;
 
-/**
- * The Denormaliser pipeline transform meta-data
- *
- * @author Matt
- * @since 17-jan-2006
- */
 @Transform(
     id = "Denormaliser",
     image = "denormaliser.svg",
     name = "i18n::Denormaliser.Name",
     description = "i18n::Denormaliser.Description",
     categoryDescription = "i18n:org.apache.hop.pipeline.transform:BaseTransform.Category.Transform",
-    documentationUrl = "https://hop.apache.org/manual/latest/pipeline/transforms/rowdenormaliser.html")
+    documentationUrl =
+        "https://hop.apache.org/manual/latest/pipeline/transforms/rowdenormaliser.html")
 public class DenormaliserMeta extends BaseTransformMeta
     implements ITransformMeta<Denormaliser, DenormaliserData> {
   private static final Class<?> PKG = DenormaliserMeta.class; // For Translator
 
   /** Fields to group over */
-  private String[] groupField;
+  @HopMetadataProperty(groupKey = "group", key = "field")
+  private List<DenormaliserGroupField> groupFields;
 
   /** The key field */
+  @HopMetadataProperty(
+      key = "key_field",
+      injectionKeyDescription = "DenormaliserDialog.KeyField.Label")
   private String keyField;
 
   /** The fields to unpivot */
-  private DenormaliserTargetField[] denormaliserTargetField;
+  @HopMetadataProperty(groupKey = "fields", key = "field")
+  private List<DenormaliserTargetField> denormaliserTargetFields;
 
   public DenormaliserMeta() {
-    super(); // allocate BaseTransformMeta
-  }
-
-  /** @return Returns the keyField. */
-  public String getKeyField() {
-    return keyField;
-  }
-
-  /** @param keyField The keyField to set. */
-  public void setKeyField(String keyField) {
-    this.keyField = keyField;
+    groupFields = new ArrayList<>();
+    denormaliserTargetFields = new ArrayList<>();
   }
 
-  /** @return Returns the groupField. */
-  public String[] getGroupField() {
-    return groupField;
+  public DenormaliserMeta(DenormaliserMeta m) {
+    this.denormaliserTargetFields = m.denormaliserTargetFields;
+    this.keyField = m.keyField;
+    this.groupFields = m.groupFields;
   }
 
-  /** @param groupField The groupField to set. */
-  public void setGroupField(String[] groupField) {
-    this.groupField = groupField;
-  }
+  @Override
+  public DenormaliserMeta clone() {
+    DenormaliserMeta meta = new DenormaliserMeta();
 
-  public String[] getDenormaliserTargetFields() {
-    String[] fields = new String[denormaliserTargetField.length];
-    for (int i = 0; i < fields.length; i++) {
-      fields[i] = denormaliserTargetField[i].getTargetName();
+    for (DenormaliserTargetField target : denormaliserTargetFields) {
+      meta.getDenormaliserTargetFields().add(new DenormaliserTargetField(target));
     }
 
-    return fields;
-  }
-
-  public DenormaliserTargetField searchTargetField(String targetName) {
-    for (int i = 0; i < denormaliserTargetField.length; i++) {
-      DenormaliserTargetField field = denormaliserTargetField[i];
-      if (field.getTargetName().equalsIgnoreCase(targetName)) {
-        return field;
-      }
-    }
-    return null;
+    return meta;
   }
 
-  /** @return Returns the pivotField. */
-  public DenormaliserTargetField[] getDenormaliserTargetField() {
-    return denormaliserTargetField;
+  /** @return Returns the keyField. */
+  public String getKeyField() {
+    return keyField;
   }
 
-  /** @param pivotField The pivotField to set. */
-  public void setDenormaliserTargetField(DenormaliserTargetField[] pivotField) {
-    this.denormaliserTargetField = pivotField;
+  /** @param keyField The keyField to set. */
+  public void setKeyField(String keyField) {
+    this.keyField = keyField;
   }
 
-  public void loadXml(Node transformNode, IHopMetadataProvider metadataProvider)
-      throws HopXmlException {
-    readData(transformNode);
+  /** @return Returns the groupField. */
+  public List<DenormaliserGroupField> getGroupFields() {
+    return groupFields;
   }
 
-  public void allocate(int sizegroup, int nrFields) {
-    groupField = new String[sizegroup];
-    denormaliserTargetField = new DenormaliserTargetField[nrFields];
+  /** @param groupFields The groupField to set. */
+  public void setGroupFields(List<DenormaliserGroupField> groupFields) {
+    this.groupFields = groupFields;
   }
 
-  public Object clone() {
-    Object retval = super.clone();
-    return retval;
+  /** @return Returns the target fields. */
+  public List<DenormaliserTargetField> getDenormaliserTargetFields() {
+    return denormaliserTargetFields;
   }
 
-  public void setDefault() {
-    int sizegroup = 0;
-    int nrFields = 0;
-
-    allocate(sizegroup, nrFields);
+  /** @param denormaliserTargetFields The denormaliserTargetFields to set. */
+  public void setDenormaliserTargetFields(List<DenormaliserTargetField> denormaliserTargetFields) {
+    this.denormaliserTargetFields = denormaliserTargetFields;
   }
 
   @Override
@@ -167,8 +142,8 @@ public class DenormaliserMeta extends BaseTransformMeta
 
     // Remove all field value(s) (there will be different entries for each output row)
     //
-    for (int i = 0; i < denormaliserTargetField.length; i++) {
-      String fieldname = denormaliserTargetField[i].getFieldName();
+    for (int i = 0; i < denormaliserTargetFields.size(); i++) {
+      String fieldname = denormaliserTargetFields.get(i).getFieldName();
       if (fieldname != null && fieldname.length() > 0) {
         int idx = row.indexOfValue(fieldname);
         if (idx >= 0) {
@@ -182,11 +157,11 @@ public class DenormaliserMeta extends BaseTransformMeta
     }
 
     // Re-add the target fields
-    for (int i = 0; i < denormaliserTargetField.length; i++) {
-      DenormaliserTargetField field = denormaliserTargetField[i];
+    for (DenormaliserTargetField field : denormaliserTargetFields) {
       try {
         IValueMeta target =
-            ValueMetaFactory.createValueMeta(field.getTargetName(), field.getTargetType());
+            ValueMetaFactory.createValueMeta(
+                field.getTargetName(), ValueMetaFactory.getIdForValueMeta(field.getTargetType()));
         target.setLength(field.getTargetLength(), field.getTargetPrecision());
         target.setOrigin(name);
         row.addValueMeta(target);
@@ -196,105 +171,7 @@ public class DenormaliserMeta extends BaseTransformMeta
     }
   }
 
-  private void readData(Node transformNode) throws HopXmlException {
-    try {
-      keyField = XmlHandler.getTagValue(transformNode, "key_field");
-
-      Node groupn = XmlHandler.getSubNode(transformNode, "group");
-      Node fields = XmlHandler.getSubNode(transformNode, "fields");
-
-      int sizegroup = XmlHandler.countNodes(groupn, "field");
-      int nrFields = XmlHandler.countNodes(fields, "field");
-
-      allocate(sizegroup, nrFields);
-
-      for (int i = 0; i < sizegroup; i++) {
-        Node fnode = XmlHandler.getSubNodeByNr(groupn, "field", i);
-        groupField[i] = XmlHandler.getTagValue(fnode, "name");
-      }
-
-      for (int i = 0; i < nrFields; i++) {
-        Node fnode = XmlHandler.getSubNodeByNr(fields, "field", i);
-        denormaliserTargetField[i] = new DenormaliserTargetField();
-        denormaliserTargetField[i].setFieldName(XmlHandler.getTagValue(fnode, "field_name"));
-        denormaliserTargetField[i].setKeyValue(XmlHandler.getTagValue(fnode, "key_value"));
-        denormaliserTargetField[i].setTargetName(XmlHandler.getTagValue(fnode, "target_name"));
-        denormaliserTargetField[i].setTargetType(XmlHandler.getTagValue(fnode, "target_type"));
-        denormaliserTargetField[i].setTargetFormat(XmlHandler.getTagValue(fnode, "target_format"));
-        denormaliserTargetField[i].setTargetLength(
-            Const.toInt(XmlHandler.getTagValue(fnode, "target_length"), -1));
-        denormaliserTargetField[i].setTargetPrecision(
-            Const.toInt(XmlHandler.getTagValue(fnode, "target_precision"), -1));
-        denormaliserTargetField[i].setTargetDecimalSymbol(
-            XmlHandler.getTagValue(fnode, "target_decimal_symbol"));
-        denormaliserTargetField[i].setTargetGroupingSymbol(
-            XmlHandler.getTagValue(fnode, "target_grouping_symbol"));
-        denormaliserTargetField[i].setTargetCurrencySymbol(
-            XmlHandler.getTagValue(fnode, "target_currency_symbol"));
-        denormaliserTargetField[i].setTargetNullString(
-            XmlHandler.getTagValue(fnode, "target_null_string"));
-        denormaliserTargetField[i].setTargetAggregationType(
-            XmlHandler.getTagValue(fnode, "target_aggregation_type"));
-      }
-    } catch (Exception e) {
-      throw new HopXmlException(
-          BaseMessages.getString(
-              PKG, "DenormaliserMeta.Exception.UnableToLoadTransformMetaFromXML"),
-          e);
-    }
-  }
-
-  public String getXml() {
-    StringBuilder retval = new StringBuilder();
-
-    retval.append("      " + XmlHandler.addTagValue("key_field", keyField));
-
-    retval.append("      <group>" + Const.CR);
-    for (int i = 0; i < groupField.length; i++) {
-      retval.append("        <field>" + Const.CR);
-      retval.append("          " + XmlHandler.addTagValue("name", groupField[i]));
-      retval.append("          </field>" + Const.CR);
-    }
-    retval.append("        </group>" + Const.CR);
-
-    retval.append("      <fields>" + Const.CR);
-    for (int i = 0; i < denormaliserTargetField.length; i++) {
-      DenormaliserTargetField field = denormaliserTargetField[i];
-
-      retval.append("        <field>" + Const.CR);
-      retval.append("          " + XmlHandler.addTagValue("field_name", field.getFieldName()));
-      retval.append("          " + XmlHandler.addTagValue("key_value", field.getKeyValue()));
-      retval.append("          " + XmlHandler.addTagValue("target_name", field.getTargetName()));
-      retval.append(
-          "          " + XmlHandler.addTagValue("target_type", field.getTargetTypeDesc()));
-      retval.append(
-          "          " + XmlHandler.addTagValue("target_format", field.getTargetFormat()));
-      retval.append(
-          "          " + XmlHandler.addTagValue("target_length", field.getTargetLength()));
-      retval.append(
-          "          " + XmlHandler.addTagValue("target_precision", field.getTargetPrecision()));
-      retval.append(
-          "          "
-              + XmlHandler.addTagValue("target_decimal_symbol", field.getTargetDecimalSymbol()));
-      retval.append(
-          "          "
-              + XmlHandler.addTagValue("target_grouping_symbol", field.getTargetGroupingSymbol()));
-      retval.append(
-          "          "
-              + XmlHandler.addTagValue("target_currency_symbol", field.getTargetCurrencySymbol()));
-      retval.append(
-          "          " + XmlHandler.addTagValue("target_null_string", field.getTargetNullString()));
-      retval.append(
-          "          "
-              + XmlHandler.addTagValue(
-                  "target_aggregation_type", field.getTargetAggregationTypeDesc()));
-      retval.append("          </field>" + Const.CR);
-    }
-    retval.append("        </fields>" + Const.CR);
-
-    return retval.toString();
-  }
-
+  @Override
   public void check(
       List<ICheckResult> remarks,
       PipelineMeta pipelineMeta,
@@ -310,7 +187,7 @@ public class DenormaliserMeta extends BaseTransformMeta
     if (input.length > 0) {
       cr =
           new CheckResult(
-              CheckResult.TYPE_RESULT_OK,
+              ICheckResult.TYPE_RESULT_OK,
               BaseMessages.getString(
                   PKG, "DenormaliserMeta.CheckResult.ReceivingInfoFromOtherTransforms"),
               transformMeta);
@@ -318,7 +195,7 @@ public class DenormaliserMeta extends BaseTransformMeta
     } else {
       cr =
           new CheckResult(
-              CheckResult.TYPE_RESULT_ERROR,
+              ICheckResult.TYPE_RESULT_ERROR,
               BaseMessages.getString(PKG, "DenormaliserMeta.CheckResult.NoInputReceived"),
               transformMeta);
       remarks.add(cr);
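
To make the new list-based accessors concrete, here is a minimal configuration sketch (illustrative only, not part of this commit); it uses only methods visible in this diff, and all column names, key values and the "Number" type are made-up examples:

package org.apache.hop.pipeline.transforms.denormaliser;

// Sketch: configuring the rewritten, list-based DenormaliserMeta from code.
public class DenormaliserMetaSketch {
  public static void main(String[] args) {
    DenormaliserMeta meta = new DenormaliserMeta();

    // Column whose values ("P1", "P2", ...) decide which target field a row feeds.
    meta.setKeyField("product_code"); // hypothetical column name

    // Group rows on one or more columns, each wrapped in a DenormaliserGroupField.
    DenormaliserGroupField group = new DenormaliserGroupField();
    group.setName("customer_id"); // hypothetical column name
    meta.getGroupFields().add(group);

    // One unpivoted target field per key value.
    DenormaliserTargetField target = new DenormaliserTargetField();
    target.setFieldName("amount");     // hypothetical value column
    target.setKeyValue("P1");          // hypothetical key value
    target.setTargetName("amount_p1"); // hypothetical output column
    target.setTargetType("Number");    // target type is now stored as the value meta name
    target.setTargetAggregationType(
        DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_SUM);
    meta.getDenormaliserTargetFields().add(target);

    System.out.println(meta.getDenormaliserTargetFields().size()); // 1
  }
}
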
diff --git a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserTargetField.java b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserTargetField.java
index 983fc68..061009a 100644
--- a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserTargetField.java
+++ b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserTargetField.java
@@ -19,279 +19,350 @@ package org.apache.hop.pipeline.transforms.denormaliser;
 
 import org.apache.hop.core.row.value.ValueMetaFactory;
 import org.apache.hop.i18n.BaseMessages;
+import org.apache.hop.metadata.api.HopMetadataProperty;
+import org.apache.hop.metadata.api.IEnumHasCode;
 
-/**
- * Contains the properties of the target field, conversion mask, type, aggregation method, etc.
- *
- * @author Matt
- * @since 17-jan-2006
- */
-public class DenormaliserTargetField {
+/** Contains the properties of the target field, conversion mask, type, aggregation method, etc. */
+public class DenormaliserTargetField implements Cloneable {
   private static final Class<?> PKG = DenormaliserMeta.class; // For Translator
 
+  @HopMetadataProperty(
+      key = "field_name",
+      injectionKey = "NAME",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.ValueFieldname")
   private String fieldName;
+
+  @HopMetadataProperty(
+      key = "key_value",
+      injectionKey = "KEY_VALUE",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Keyvalue")
   private String keyValue;
+
+  @HopMetadataProperty(
+      key = "target_name",
+      injectionKey = "TARGET_NAME",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.TargetFieldname")
   private String targetName;
-  private int targetType;
+
+  @HopMetadataProperty(
+      key = "target_type",
+      injectionKey = "TARGET_TYPE",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Type")
+  private String targetType;
+
+  @HopMetadataProperty(
+      key = "target_length",
+      injectionKey = "TARGET_LENGTH",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Length")
   private int targetLength;
+
+  @HopMetadataProperty(
+      key = "target_precision",
+      injectionKey = "TARGET_PRECISION",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Precision")
   private int targetPrecision;
+
+  @HopMetadataProperty(
+      key = "target_currency_symbol",
+      injectionKey = "TARGET_CURRENCY",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Currency")
   private String targetCurrencySymbol;
+
+  @HopMetadataProperty(
+      key = "target_decimal_symbol",
+      injectionKey = "TARGET_DECIMAL",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Decimal")
   private String targetDecimalSymbol;
+
+  @HopMetadataProperty(
+      key = "target_grouping_symbol",
+      injectionKey = "TARGET_GROUP",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Group")
   private String targetGroupingSymbol;
+
+  @HopMetadataProperty(
+      key = "target_null_string",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.NullIf")
   private String targetNullString;
+
+  @HopMetadataProperty(
+      key = "target_format",
+      injectionKey = "TARGET_FORMAT",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Format")
   private String targetFormat;
-  private int targetAggregationType;
-
-  public static final int TYPE_AGGR_NONE = 0;
-  public static final int TYPE_AGGR_SUM = 1;
-  public static final int TYPE_AGGR_AVERAGE = 2;
-  public static final int TYPE_AGGR_MIN = 3;
-  public static final int TYPE_AGGR_MAX = 4;
-  public static final int TYPE_AGGR_COUNT_ALL = 5;
-  public static final int TYPE_AGGR_CONCAT_COMMA = 6;
-
-  public static final String[] typeAggrDesc = /* WARNING: DO NOT TRANSLATE THIS. WE ARE SERIOUS, DON'T TRANSLATE! */
-    { "-", "SUM", "AVERAGE", "MIN", "MAX", "COUNT_ALL", "CONCAT_COMMA"
-    };
-
-  public static final String[] typeAggrLongDesc =
-    {
-      "-", BaseMessages.getString( PKG, "DenormaliserTargetField.TypeAggrLongDesc.Sum" ),
-      BaseMessages.getString( PKG, "DenormaliserTargetField.TypeAggrLongDesc.Average" ),
-      BaseMessages.getString( PKG, "DenormaliserTargetField.TypeAggrLongDesc.Min" ),
-      BaseMessages.getString( PKG, "DenormaliserTargetField.TypeAggrLongDesc.Max" ),
-      BaseMessages.getString( PKG, "DenormaliserTargetField.TypeAggrLongDesc.CountAll" ),
-      BaseMessages.getString( PKG, "DenormaliserTargetField.TypeAggrLongDesc.ConcatComma" )
-    };
 
-  /**
-   * @return Returns the fieldName.
-   */
+  @HopMetadataProperty(
+      key = "target_aggregation_type",
+      injectionKey = "TARGET_AGGREGATION",
+      injectionKeyDescription = "DenormaliserDialog.ColumnInfo.Aggregation",
+      storeWithCode = true)
+  private DenormaliseAggregation targetAggregationType;
+
+  /** enum for the Aggregation type */
+  public enum DenormaliseAggregation implements IEnumHasCode {
+    TYPE_AGGR_NONE("-", "-", 0),
+    TYPE_AGGR_SUM(
+        "SUM", BaseMessages.getString(PKG, "DenormaliserTargetField.TypeAggrLongDesc.Sum"), 1),
+    TYPE_AGGR_AVERAGE(
+        "AVERAGE",
+        BaseMessages.getString(PKG, "DenormaliserTargetField.TypeAggrLongDesc.Average"),
+        2),
+    TYPE_AGGR_MIN(
+        "MIN", BaseMessages.getString(PKG, "DenormaliserTargetField.TypeAggrLongDesc.Min"), 3),
+    TYPE_AGGR_MAX(
+        "MAX", BaseMessages.getString(PKG, "DenormaliserTargetField.TypeAggrLongDesc.Max"), 4),
+    TYPE_AGGR_COUNT_ALL(
+        "COUNT_ALL",
+        BaseMessages.getString(PKG, "DenormaliserTargetField.TypeAggrLongDesc.CountAll"),
+        5),
+    TYPE_AGGR_CONCAT_COMMA(
+        "CONCAT_COMMA",
+        BaseMessages.getString(PKG, "DenormaliserTargetField.TypeAggrLongDesc.ConcatComma"),
+        6);
+
+    private final String code;
+    private final String description;
+    private final int defaultResultType;
+
+    DenormaliseAggregation(String code, String description, int defaultResultType) {
+      this.code = code;
+      this.description = description;
+      this.defaultResultType = defaultResultType;
+    }
+
+    /**
+     * Get the Descriptions
+     *
+     * @return The Aggregation type descriptions
+     */
+    public static String[] getDescriptions() {
+      String[] descriptions = new String[values().length];
+      for (int i = 0; i < descriptions.length; i++) {
+        descriptions[i] = values()[i].getDescription();
+      }
+      return descriptions;
+    }
+
+    /**
+     * Gets code
+     *
+     * @return value of code
+     */
+    public String getCode() {
+      return code;
+    }
+
+    /**
+     * Gets description
+     *
+     * @return value of description
+     */
+    public String getDescription() {
+      return description;
+    }
+
+    /**
+     * Gets defaultResultType
+     *
+     * @return value of defaultResultType
+     */
+    public int getDefaultResultType() {
+      return defaultResultType;
+    }
+
+    public static DenormaliseAggregation getTypeWithDescription(String description) {
+      for (DenormaliseAggregation value : values()) {
+        if (value.getDescription().equals(description)) {
+          return value;
+        }
+      }
+      return TYPE_AGGR_NONE;
+    }
+  }
+
+  /** Create an empty pivot target field */
+  public DenormaliserTargetField() {
+    this.targetAggregationType = DenormaliseAggregation.TYPE_AGGR_NONE;
+  }
+
+  public DenormaliserTargetField(DenormaliserTargetField t) {
+    this.fieldName = t.fieldName;
+    this.keyValue = t.keyValue;
+    this.targetName = t.targetName;
+    this.targetType = t.targetType;
+    this.targetLength = t.targetLength;
+    this.targetPrecision = t.targetPrecision;
+    this.targetCurrencySymbol = t.targetCurrencySymbol;
+    this.targetDecimalSymbol = t.targetDecimalSymbol;
+    this.targetGroupingSymbol = t.targetGroupingSymbol;
+    this.targetNullString = t.targetNullString;
+    this.targetFormat = t.targetFormat;
+    this.targetAggregationType = t.targetAggregationType;
+  }
+
+  public DenormaliserTargetField(
+      String fieldName,
+      String keyValue,
+      String targetName,
+      String targetType,
+      int targetLength,
+      int targetPrecision,
+      String targetCurrencySymbol,
+      String targetDecimalSymbol,
+      String targetGroupingSymbol,
+      String targetNullString,
+      String targetFormat,
+      DenormaliseAggregation targetAggregationType) {
+    this.fieldName = fieldName;
+    this.keyValue = keyValue;
+    this.targetName = targetName;
+    this.targetType = targetType;
+    this.targetLength = targetLength;
+    this.targetPrecision = targetPrecision;
+    this.targetCurrencySymbol = targetCurrencySymbol;
+    this.targetDecimalSymbol = targetDecimalSymbol;
+    this.targetGroupingSymbol = targetGroupingSymbol;
+    this.targetNullString = targetNullString;
+    this.targetFormat = targetFormat;
+    this.targetAggregationType = targetAggregationType;
+  }
+
+  public DenormaliserTargetField clone() {
+    return new DenormaliserTargetField(this);
+  }
+
+  /** @return Returns the fieldName. */
   public String getFieldName() {
     return fieldName;
   }
 
-  /**
-   * @param fieldName The fieldName to set.
-   */
-  public void setFieldName( String fieldName ) {
+  /** @param fieldName The fieldName to set. */
+  public void setFieldName(String fieldName) {
     this.fieldName = fieldName;
   }
 
-  /**
-   * @return Returns the targetFormat.
-   */
+  /** @return Returns the targetFormat. */
   public String getTargetFormat() {
     return targetFormat;
   }
 
-  /**
-   * @param targetFormat The targetFormat to set.
-   */
-  public void setTargetFormat( String targetFormat ) {
+  /** @param targetFormat The targetFormat to set. */
+  public void setTargetFormat(String targetFormat) {
     this.targetFormat = targetFormat;
   }
 
-  /**
-   * Create an empty pivot target field
-   */
-  public DenormaliserTargetField() {
-  }
-
-  /**
-   * @return Returns the keyValue.
-   */
+  /** @return Returns the keyValue. */
   public String getKeyValue() {
     return keyValue;
   }
 
-  /**
-   * @param keyValue The keyValue to set.
-   */
-  public void setKeyValue( String keyValue ) {
+  /** @param keyValue The keyValue to set. */
+  public void setKeyValue(String keyValue) {
     this.keyValue = keyValue;
   }
 
-  /**
-   * @return Returns the targetCurrencySymbol.
-   */
+  /** @return Returns the targetCurrencySymbol. */
   public String getTargetCurrencySymbol() {
     return targetCurrencySymbol;
   }
 
-  /**
-   * @param targetCurrencySymbol The targetCurrencySymbol to set.
-   */
-  public void setTargetCurrencySymbol( String targetCurrencySymbol ) {
+  /** @param targetCurrencySymbol The targetCurrencySymbol to set. */
+  public void setTargetCurrencySymbol(String targetCurrencySymbol) {
     this.targetCurrencySymbol = targetCurrencySymbol;
   }
 
-  /**
-   * @return Returns the targetDecimalSymbol.
-   */
+  /** @return Returns the targetDecimalSymbol. */
   public String getTargetDecimalSymbol() {
     return targetDecimalSymbol;
   }
 
-  /**
-   * @param targetDecimalSymbol The targetDecimalSymbol to set.
-   */
-  public void setTargetDecimalSymbol( String targetDecimalSymbol ) {
+  /** @param targetDecimalSymbol The targetDecimalSymbol to set. */
+  public void setTargetDecimalSymbol(String targetDecimalSymbol) {
     this.targetDecimalSymbol = targetDecimalSymbol;
   }
 
-  /**
-   * @return Returns the targetGroupingSymbol.
-   */
+  /** @return Returns the targetGroupingSymbol. */
   public String getTargetGroupingSymbol() {
     return targetGroupingSymbol;
   }
 
-  /**
-   * @param targetGroupingSymbol The targetGroupingSymbol to set.
-   */
-  public void setTargetGroupingSymbol( String targetGroupingSymbol ) {
+  /** @param targetGroupingSymbol The targetGroupingSymbol to set. */
+  public void setTargetGroupingSymbol(String targetGroupingSymbol) {
     this.targetGroupingSymbol = targetGroupingSymbol;
   }
 
-  /**
-   * @return Returns the targetLength.
-   */
+  /** @return Returns the targetLength. */
   public int getTargetLength() {
     return targetLength;
   }
 
-  /**
-   * @param targetLength The targetLength to set.
-   */
-  public void setTargetLength( int targetLength ) {
+  /** @param targetLength The targetLength to set. */
+  public void setTargetLength(int targetLength) {
     this.targetLength = targetLength;
   }
 
-  /**
-   * @return Returns the targetName.
-   */
+  /** @return Returns the targetName. */
   public String getTargetName() {
     return targetName;
   }
 
-  /**
-   * @param targetName The targetName to set.
-   */
-  public void setTargetName( String targetName ) {
+  /** @param targetName The targetName to set. */
+  public void setTargetName(String targetName) {
     this.targetName = targetName;
   }
 
-  /**
-   * @return Returns the targetNullString.
-   */
+  /** @return Returns the targetNullString. */
   public String getTargetNullString() {
     return targetNullString;
   }
 
-  /**
-   * @param targetNullString The targetNullString to set.
-   */
-  public void setTargetNullString( String targetNullString ) {
+  /** @param targetNullString The targetNullString to set. */
+  public void setTargetNullString(String targetNullString) {
     this.targetNullString = targetNullString;
   }
 
-  /**
-   * @return Returns the targetPrecision.
-   */
+  /** @return Returns the targetPrecision. */
   public int getTargetPrecision() {
     return targetPrecision;
   }
 
-  /**
-   * @param targetPrecision The targetPrecision to set.
-   */
-  public void setTargetPrecision( int targetPrecision ) {
+  /** @param targetPrecision The targetPrecision to set. */
+  public void setTargetPrecision(int targetPrecision) {
     this.targetPrecision = targetPrecision;
   }
 
-  /**
-   * @return Returns the targetType.
-   */
-  public int getTargetType() {
+  /** @return Returns the targetType. */
+  public String getTargetType() {
     return targetType;
   }
 
-  /**
-   * @param targetType The targetType to set.
-   */
-  public void setTargetType( int targetType ) {
+  /** @param targetType The targetType to set. */
+  public void setTargetType(String targetType) {
     this.targetType = targetType;
   }
 
   /**
-   * @return The description of the target Value type
-   */
-  public String getTargetTypeDesc() {
-    return ValueMetaFactory.getValueMetaName( targetType );
-  }
-
-  /**
    * Set the target type
    *
    * @param typeDesc the target value type description
    */
-  public void setTargetType( String typeDesc ) {
-    targetType = ValueMetaFactory.getIdForValueMeta( typeDesc );
+  public void setTargetType(int typeDesc) {
+    targetType = ValueMetaFactory.getValueMetaName(typeDesc);
   }
 
   /**
-   * @return The target aggregation type: when a key-value collision occurs, what it the aggregation to use.
+   * @return The target aggregation type: when a key-value collision occurs, what is the
+   *     aggregation to use.
    */
-  public int getTargetAggregationType() {
+  public DenormaliseAggregation getTargetAggregationType() {
     return targetAggregationType;
   }
 
   /**
-   * @param targetAggregationType Specify the The aggregation type: when a key-value collision occurs, what it the aggregation to use.
+   * @param targetAggregationType Specify the aggregation type: when a key-value collision
+   *     occurs, what is the aggregation to use.
    */
-  public void setTargetAggregationType( int targetAggregationType ) {
+  public void setTargetAggregationType(DenormaliseAggregation targetAggregationType) {
     this.targetAggregationType = targetAggregationType;
   }
-
-  public static final int getAggregationType( String desc ) {
-    for ( int i = 0; i < typeAggrDesc.length; i++ ) {
-      if ( typeAggrDesc[ i ].equalsIgnoreCase( desc ) ) {
-        return i;
-      }
-    }
-    for ( int i = 0; i < typeAggrLongDesc.length; i++ ) {
-      if ( typeAggrLongDesc[ i ].equalsIgnoreCase( desc ) ) {
-        return i;
-      }
-    }
-    return 0;
-  }
-
-  public static final String getAggregationTypeDesc( int i ) {
-    if ( i < 0 || i >= typeAggrDesc.length ) {
-      return null;
-    }
-    return typeAggrDesc[ i ];
-  }
-
-  public static final String getAggregationTypeDescLong( int i ) {
-    if ( i < 0 || i >= typeAggrLongDesc.length ) {
-      return null;
-    }
-    return typeAggrLongDesc[ i ];
-  }
-
-  public void setTargetAggregationType( String aggregationType ) {
-    targetAggregationType = getAggregationType( aggregationType );
-  }
-
-  public String getTargetAggregationTypeDesc() {
-    return getAggregationTypeDesc( targetAggregationType );
-  }
-
-  public String getTargetAggregationTypeDescLong() {
-    return getAggregationTypeDescLong( targetAggregationType );
-  }
-
 }
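
The enum above replaces the old parallel typeAggrDesc / typeAggrLongDesc arrays: the dialog now fills its aggregation combo from getDescriptions() and maps the selected text back with getTypeWithDescription(), which falls back to TYPE_AGGR_NONE for anything it does not recognize. A small lookup sketch (illustrative only, not part of this commit):

package org.apache.hop.pipeline.transforms.denormaliser;

import org.apache.hop.pipeline.transforms.denormaliser.DenormaliserTargetField.DenormaliseAggregation;

// Sketch: round-tripping between combo-box descriptions and the aggregation enum.
public class AggregationLookupSketch {
  public static void main(String[] args) {
    // The localized descriptions, in enum order; this is what feeds the dialog combo.
    for (String description : DenormaliseAggregation.getDescriptions()) {
      DenormaliseAggregation type = DenormaliseAggregation.getTypeWithDescription(description);
      System.out.println(type.getCode() + " <- " + description);
    }

    // Unknown text falls back to TYPE_AGGR_NONE, the default of a new DenormaliserTargetField.
    System.out.println(DenormaliseAggregation.getTypeWithDescription("no such aggregation"));
  }
}
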
diff --git a/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserAggregationsTest.java b/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserAggregationsTest.java
index 60be406..7d85ec6 100644
--- a/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserAggregationsTest.java
+++ b/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserAggregationsTest.java
@@ -53,11 +53,10 @@ public class DenormaliserAggregationsTest {
   @BeforeClass
   public static void setUpBeforeClass() throws Exception {
     mockHelper =
-      new TransformMockHelper<>( "Denormaliser", DenormaliserMeta.class,
-        DenormaliserData.class );
-    when( mockHelper.logChannelFactory.create( any(), any( ILoggingObject.class ) ) ).thenReturn(
-      mockHelper.iLogChannel );
-    when( mockHelper.pipeline.isRunning() ).thenReturn( true );
+        new TransformMockHelper<>("Denormaliser", DenormaliserMeta.class, DenormaliserData.class);
+    when(mockHelper.logChannelFactory.create(any(), any(ILoggingObject.class)))
+        .thenReturn(mockHelper.iLogChannel);
+    when(mockHelper.pipeline.isRunning()).thenReturn(true);
   }
 
   @AfterClass
@@ -67,8 +66,10 @@ public class DenormaliserAggregationsTest {
 
   @Before
   public void setUp() throws Exception {
-    Mockito.when( mockHelper.transformMeta.getTransform() ).thenReturn( meta );
-    transform = new Denormaliser( mockHelper.transformMeta, meta, data, 0, mockHelper.pipelineMeta, mockHelper.pipeline );
+    Mockito.when(mockHelper.transformMeta.getTransform()).thenReturn(meta);
+    transform =
+        new Denormaliser(
+            mockHelper.transformMeta, meta, data, 0, mockHelper.pipelineMeta, mockHelper.pipeline);
   }
 
   /**
@@ -79,23 +80,27 @@ public class DenormaliserAggregationsTest {
   @Test
   public void testDenormalizeSum100PlusNull() throws HopValueException {
     // prevTargetData
-    Long sto = new Long( 100 );
-    data.targetResult = new Object[] { sto };
+    Long sto = new Long(100);
+    data.targetResult = new Object[] {sto};
 
-    transform.deNormalise( testSumPreconditions( "SUM" ), new Object[] { JUNIT, null } );
+    transform.deNormalise(
+        testSumPreconditions(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_SUM),
+        new Object[] {JUNIT, null});
 
-    Assert.assertEquals( "100 + null = 100 ", sto, data.targetResult[ 0 ] );
+    Assert.assertEquals("100 + null = 100 ", sto, data.targetResult[0]);
   }
 
   @Test
   public void testDenormalizeSumNullPlus100() throws HopValueException {
     // prevTargetData
-    Long sto = new Long( 100 );
-    data.targetResult = new Object[] { null };
+    Long sto = new Long(100);
+    data.targetResult = new Object[] {null};
 
-    transform.deNormalise( testSumPreconditions( "SUM" ), new Object[] { JUNIT, sto } );
+    transform.deNormalise(
+        testSumPreconditions(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_SUM),
+        new Object[] {JUNIT, sto});
 
-    Assert.assertEquals( "null + 100 = 100 ", sto, data.targetResult[ 0 ] );
+    Assert.assertEquals("null + 100 = 100 ", sto, data.targetResult[0]);
   }
 
   /**
@@ -105,14 +110,16 @@ public class DenormaliserAggregationsTest {
    */
   @Test
   public void testDenormalizeMinValueY() throws HopValueException {
-    transform.setMinNullIsValued( true );
+    transform.setMinNullIsValued(true);
 
-    Long trinadzat = new Long( -13 );
-    data.targetResult = new Object[] { trinadzat };
+    Long trinadzat = new Long(-13);
+    data.targetResult = new Object[] {trinadzat};
 
-    transform.deNormalise( testSumPreconditions( "MIN" ), new Object[] { JUNIT, null } );
+    transform.deNormalise(
+        testSumPreconditions(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_MIN),
+        new Object[] {JUNIT, null});
 
-    Assert.assertNull( "Null now is new minimal", data.targetResult[ 0 ] );
+    Assert.assertNull("Null now is new minimal", data.targetResult[0]);
   }
 
   /**
@@ -122,14 +129,16 @@ public class DenormaliserAggregationsTest {
    */
   @Test
   public void testDenormalizeMinValueN() throws HopValueException {
-    transform.setVariable( Const.HOP_AGGREGATION_MIN_NULL_IS_VALUED, "N" );
+    transform.setVariable(Const.HOP_AGGREGATION_MIN_NULL_IS_VALUED, "N");
 
-    Long sto = new Long( 100 );
-    data.targetResult = new Object[] { sto };
+    Long sto = new Long(100);
+    data.targetResult = new Object[] {sto};
 
-    transform.deNormalise( testSumPreconditions( "MIN" ), new Object[] { JUNIT, null } );
+    transform.deNormalise(
+        testSumPreconditions(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_MIN),
+        new Object[] {JUNIT, null});
 
-    Assert.assertEquals( "Null is ignored", sto, data.targetResult[ 0 ] );
+    Assert.assertEquals("Null is ignored", sto, data.targetResult[0]);
   }
 
   /**
@@ -137,33 +146,34 @@ public class DenormaliserAggregationsTest {
    *
    * @return
    */
-  IRowMeta testSumPreconditions( String agg ) {
+  IRowMeta testSumPreconditions(DenormaliserTargetField.DenormaliseAggregation agg) {
 
     // create rmi for one string and 2 integers
     IRowMeta rmi = new RowMeta();
     List<IValueMeta> list = new ArrayList<>();
-    list.add( new ValueMetaString( "a" ) );
-    list.add( new ValueMetaInteger( "b" ) );
-    list.add( new ValueMetaInteger( "d" ) );
-    rmi.setValueMetaList( list );
+    list.add(new ValueMetaString("a"));
+    list.add(new ValueMetaInteger("b"));
+    list.add(new ValueMetaInteger("d"));
+    rmi.setValueMetaList(list);
 
     // denormalizer key field will be String 'Junit'
     data.keyValue = new HashMap<>();
     List<Integer> listInt = new ArrayList<>();
-    listInt.add( 0 );
-    data.keyValue.put( JUNIT, listInt );
+    listInt.add(0);
+    data.keyValue.put(JUNIT, listInt);
 
     // we will calculate sum for second field ( like ["JUNIT", 1] )
-    data.fieldNameIndex = new int[] { 1 };
+    data.fieldNameIndex = new int[] {1};
     data.inputRowMeta = rmi;
     data.outputRowMeta = rmi;
-    data.removeNrs = new int[] { -1 };
+    data.removeNrs = new int[] {-1};
 
     // we do create internal instance of output field wiht sum aggregation
     DenormaliserTargetField tField = new DenormaliserTargetField();
-    tField.setTargetAggregationType( agg );
-    DenormaliserTargetField[] pivotField = new DenormaliserTargetField[] { tField };
-    meta.setDenormaliserTargetField( pivotField );
+    tField.setTargetAggregationType(agg);
+    ArrayList<DenormaliserTargetField> pivotField = new ArrayList<>();
+    pivotField.add(new DenormaliserTargetField(tField));
+    meta.setDenormaliserTargetFields(pivotField);
 
     // return row meta interface to pass into denormalize method
     return rmi;
@@ -176,27 +186,31 @@ public class DenormaliserAggregationsTest {
    */
   @Test
   public void testBuildResultWithNullsY() throws HopValueException {
-    transform.setAllNullsAreZero( true );
+    transform.setAllNullsAreZero(true);
 
-    Object[] rowData = new Object[ 10 ];
-    data.targetResult = new Object[ 1 ];
+    Object[] rowData = new Object[10];
+    data.targetResult = new Object[1];
     // this removal of input rows?
-    IRowMeta rmi = testSumPreconditions( "-" );
-    data.removeNrs = new int[] { 0 };
-    Object[] outputRowData = transform.buildResult( rmi, rowData );
+    IRowMeta rmi =
+        testSumPreconditions(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_NONE);
+    data.removeNrs = new int[] {0};
+    Object[] outputRowData = transform.buildResult(rmi, rowData);
 
-    Assert.assertEquals( "Output row: nulls are zeros", new Long( 0 ), outputRowData[ 2 ] );
+    Assert.assertEquals("Output row: nulls are zeros", new Long(0), outputRowData[2]);
   }
 
   @Test
   public void testBuildResultWithNullsN() throws HopValueException {
-    transform.setAllNullsAreZero( false );
+    transform.setAllNullsAreZero(false);
 
-    Object[] rowData = new Object[ 10 ];
-    data.targetResult = new Object[ 1 ];
-    Object[] outputRowData = transform.buildResult( testSumPreconditions( "-" ), rowData );
+    Object[] rowData = new Object[10];
+    data.targetResult = new Object[1];
+    Object[] outputRowData =
+        transform.buildResult(
+            testSumPreconditions(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_NONE),
+            rowData);
 
-    Assert.assertNull( "Output row: nulls are nulls", outputRowData[ 3 ] );
+    Assert.assertNull("Output row: nulls are nulls", outputRowData[3]);
   }
 
   /**
@@ -207,28 +221,30 @@ public class DenormaliserAggregationsTest {
   @Test
   public void testNewGroup() throws Exception {
     DenormaliserTargetField field1 = new DenormaliserTargetField();
-    field1.setTargetAggregationType( "MIN" );
+    field1.setTargetAggregationType(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_MIN);
 
     DenormaliserTargetField field2 = new DenormaliserTargetField();
-    field2.setTargetAggregationType( "MIN" );
+    field2.setTargetAggregationType(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_MIN);
 
     DenormaliserTargetField field3 = new DenormaliserTargetField();
-    field3.setTargetAggregationType( "MIN" );
+    field3.setTargetAggregationType(DenormaliserTargetField.DenormaliseAggregation.TYPE_AGGR_MIN);
 
-    DenormaliserTargetField[] pivotField = new DenormaliserTargetField[] { field1, field2, field3 };
-    meta.setDenormaliserTargetField( pivotField );
-    data.counters = new long[ 3 ];
-    data.sum = new Object[ 3 ];
+    ArrayList<DenormaliserTargetField> pivotField = new ArrayList<>();
+    pivotField.add(field1);
+    pivotField.add(field2);
+    pivotField.add(field3);
+    meta.setDenormaliserTargetFields(pivotField);
+    data.counters = new long[3];
+    data.sum = new Object[3];
 
-    Method newGroupMethod = transform.getClass().getDeclaredMethod( "newGroup" );
-    newGroupMethod.setAccessible( true );
-    newGroupMethod.invoke( transform );
+    Method newGroupMethod = transform.getClass().getDeclaredMethod("newGroup");
+    newGroupMethod.setAccessible(true);
+    newGroupMethod.invoke(transform);
 
-    Assert.assertEquals( 3, data.targetResult.length );
+    Assert.assertEquals(3, data.targetResult.length);
 
-    for ( Object result : data.targetResult ) {
-      Assert.assertNull( result );
+    for (Object result : data.targetResult) {
+      Assert.assertNull(result);
     }
   }
-
 }
diff --git a/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/PDI4910_DenormaliserTest.java b/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserTest.java
similarity index 55%
rename from plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/PDI4910_DenormaliserTest.java
rename to plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserTest.java
index 394e3e9..dfff7b6 100644
--- a/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/PDI4910_DenormaliserTest.java
+++ b/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormaliserTest.java
@@ -17,7 +17,6 @@
 
 package org.apache.hop.pipeline.transforms.denormaliser;
 
-
 import org.apache.hop.core.logging.ILoggingObject;
 import org.apache.hop.core.row.IRowMeta;
 import org.apache.hop.core.row.RowMeta;
@@ -31,6 +30,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import java.lang.reflect.Method;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
@@ -38,16 +38,16 @@ import java.util.List;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.when;
 
-public class PDI4910_DenormaliserTest {
+public class DenormaliserTest {
 
   private TransformMockHelper<DenormaliserMeta, DenormaliserData> mockHelper;
-  private Denormaliser denormaliser;
 
   @Before
   public void init() {
-    mockHelper = new TransformMockHelper<>( "Denormalizer", DenormaliserMeta.class, DenormaliserData.class );
-    when( mockHelper.logChannelFactory.create( any(), any( ILoggingObject.class ) ) )
-      .thenReturn( mockHelper.iLogChannel );
+    mockHelper =
+        new TransformMockHelper<>("Denormalizer", DenormaliserMeta.class, DenormaliserData.class);
+    when(mockHelper.logChannelFactory.create(any(), any(ILoggingObject.class)))
+        .thenReturn(mockHelper.iLogChannel);
   }
 
   @After
@@ -62,50 +62,52 @@ public class PDI4910_DenormaliserTest {
     DenormaliserData data = new DenormaliserData();
     data.keyFieldNr = 0;
     data.keyValue = new HashMap<>();
-    data.keyValue.put( "1", Arrays.asList( new Integer[] { 0, 1 } ) );
-    data.fieldNameIndex = new int[] { 1, 2 };
+    data.keyValue.put("1", Arrays.asList(new Integer[] {0, 1}));
+    data.fieldNameIndex = new int[] {1, 2};
     data.inputRowMeta = new RowMeta();
-    ValueMetaDate outDateField1 = new ValueMetaDate( "date_field[yyyy-MM-dd]" );
-    ValueMetaDate outDateField2 = new ValueMetaDate( "date_field[yyyy/MM/dd]" );
+    ValueMetaDate outDateField1 = new ValueMetaDate("date_field[yyyy-MM-dd]");
+    ValueMetaDate outDateField2 = new ValueMetaDate("date_field[yyyy/MM/dd]");
     data.outputRowMeta = new RowMeta();
-    data.outputRowMeta.addValueMeta( 0, outDateField1 );
-    data.outputRowMeta.addValueMeta( 1, outDateField2 );
+    data.outputRowMeta.addValueMeta(0, outDateField1);
+    data.outputRowMeta.addValueMeta(1, outDateField2);
     data.removeNrs = new int[] {};
-    data.targetResult = new Object[] { null, null };
+    data.targetResult = new Object[] {null, null};
 
     // init transform meta
     DenormaliserMeta meta = new DenormaliserMeta();
-    DenormaliserTargetField[] denormaliserTargetFields = new DenormaliserTargetField[ 2 ];
+    ArrayList<DenormaliserTargetField> denormaliserTargetFields = new ArrayList<>();
     DenormaliserTargetField targetField1 = new DenormaliserTargetField();
     DenormaliserTargetField targetField2 = new DenormaliserTargetField();
-    targetField1.setTargetFormat( "yyyy-MM-dd" );
-    targetField2.setTargetFormat( "yyyy/MM/dd" );
-    denormaliserTargetFields[ 0 ] = targetField1;
-    denormaliserTargetFields[ 1 ] = targetField2;
-    meta.setDenormaliserTargetField( denormaliserTargetFields );
+    targetField1.setTargetFormat("yyyy-MM-dd");
+    targetField2.setTargetFormat("yyyy/MM/dd");
+    denormaliserTargetFields.add(targetField1);
+    denormaliserTargetFields.add(targetField2);
+    meta.setDenormaliserTargetFields(denormaliserTargetFields);
 
     // init row meta
     IRowMeta rowMeta = new RowMeta();
-    rowMeta.addValueMeta( 0, new ValueMetaInteger( "key" ) );
-    rowMeta.addValueMeta( 1, new ValueMetaString( "stringDate1" ) );
-    rowMeta.addValueMeta( 2, new ValueMetaString( "stringDate2" ) );
+    rowMeta.addValueMeta(0, new ValueMetaInteger("key"));
+    rowMeta.addValueMeta(1, new ValueMetaString("stringDate1"));
+    rowMeta.addValueMeta(2, new ValueMetaString("stringDate2"));
 
     // init row data
-    Object[] rowData = new Object[] { 1L, "2000-10-20", "2000/10/20" };
+    Object[] rowData = new Object[] {1L, "2000-10-20", "2000/10/20"};
 
     // init transform
-    denormaliser = new Denormaliser( mockHelper.transformMeta, meta, data, 0, mockHelper.pipelineMeta, mockHelper.pipeline );
+    Denormaliser denormaliser =
+        new Denormaliser(
+            mockHelper.transformMeta, meta, data, 0, mockHelper.pipelineMeta, mockHelper.pipeline);
 
     // call tested method
-    Method deNormalise = denormaliser.getClass().getDeclaredMethod( "deNormalise", IRowMeta.class, Object[].class );
-    Assert.assertNotNull( "Can't find a method 'deNormalise' in class Denormalizer", deNormalise );
-    deNormalise.setAccessible( true );
-    deNormalise.invoke( denormaliser, rowMeta, rowData );
+    Method deNormalise =
+        denormaliser.getClass().getDeclaredMethod("deNormalise", IRowMeta.class, Object[].class);
+    Assert.assertNotNull("Can't find a method 'deNormalise' in class Denormalizer", deNormalise);
+    deNormalise.setAccessible(true);
+    deNormalise.invoke(denormaliser, rowMeta, rowData);
 
     // vefiry
-    for ( Object res : data.targetResult ) {
-      Assert.assertNotNull( "Date is null", res );
+    for (Object res : data.targetResult) {
+      Assert.assertNotNull("Date is null", res);
     }
   }
-
 }
diff --git a/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormalizerMetaTest.java b/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormalizerMetaTest.java
deleted file mode 100644
index 2971e11..0000000
--- a/plugins/transforms/denormaliser/src/test/java/org/apache/hop/pipeline/transforms/denormaliser/DenormalizerMetaTest.java
+++ /dev/null
@@ -1,137 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements.  See the NOTICE file distributed with
- * this work for additional information regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License.  You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hop.pipeline.transforms.denormaliser;
-
-import org.apache.commons.lang.builder.EqualsBuilder;
-import org.apache.hop.core.HopEnvironment;
-import org.apache.hop.core.exception.HopException;
-import org.apache.hop.core.plugins.PluginRegistry;
-import org.apache.hop.junit.rules.RestoreHopEngineEnvironment;
-import org.apache.hop.pipeline.transform.ITransformMeta;
-import org.apache.hop.pipeline.transforms.loadsave.LoadSaveTester;
-import org.apache.hop.pipeline.transforms.loadsave.initializer.IInitializer;
-import org.apache.hop.pipeline.transforms.loadsave.validator.ArrayLoadSaveValidator;
-import org.apache.hop.pipeline.transforms.loadsave.validator.IFieldLoadSaveValidator;
-import org.apache.hop.pipeline.transforms.loadsave.validator.StringLoadSaveValidator;
-import org.junit.Before;
-import org.junit.ClassRule;
-import org.junit.Test;
-
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Random;
-import java.util.UUID;
-
-public class DenormalizerMetaTest implements IInitializer<ITransformMeta> {
-  LoadSaveTester loadSaveTester;
-  Class<DenormaliserMeta> testMetaClass = DenormaliserMeta.class;
-  @ClassRule public static RestoreHopEngineEnvironment env = new RestoreHopEngineEnvironment();
-
-  @Before
-  public void setUpLoadSave() throws Exception {
-    HopEnvironment.init();
-    PluginRegistry.init( false );
-    List<String> attributes =
-      Arrays.asList( "groupField", "keyField", "denormaliserTargetField" );
-
-    Map<String, String> getterMap = new HashMap<String, String>() {
-      //CHECKSTYLE IGNORE EmptyBlock FOR NEXT 3 LINES
-      {
-        // put( "fieldName", "getFieldName" );
-      }
-    };
-    Map<String, String> setterMap = new HashMap<String, String>() {
-      //CHECKSTYLE IGNORE EmptyBlock FOR NEXT 3 LINES
-      {
-        // put( "fieldName", "setFieldName" );
-      }
-    };
-    IFieldLoadSaveValidator<String[]> stringArrayLoadSaveValidator =
-      new ArrayLoadSaveValidator<>( new StringLoadSaveValidator(), 5 );
-
-    Map<String, IFieldLoadSaveValidator<?>> attrValidatorMap = new HashMap<>();
-    attrValidatorMap.put( "groupField", stringArrayLoadSaveValidator );
-    attrValidatorMap.put( "denormaliserTargetField",
-      new ArrayLoadSaveValidator<>( new DenormaliserTargetFieldLoadSaveValidator(), 5 ) );
-
-    Map<String, IFieldLoadSaveValidator<?>> typeValidatorMap = new HashMap<>();
-
-    loadSaveTester =
-      new LoadSaveTester( testMetaClass, attributes, new ArrayList<>(),
-        getterMap, setterMap, attrValidatorMap, typeValidatorMap, this );
-  }
-
-  // Call the allocate method on the LoadSaveTester meta class
-  @Override
-  public void modify( ITransformMeta someMeta ) {
-    if ( someMeta instanceof DenormaliserMeta ) {
-      ( (DenormaliserMeta) someMeta ).allocate( 5, 5 );
-    }
-  }
-
-  @Test
-  public void testSerialization() throws HopException {
-    loadSaveTester.testSerialization();
-  }
-
-  public class DenormaliserTargetFieldLoadSaveValidator implements IFieldLoadSaveValidator<DenormaliserTargetField> {
-    final Random rand = new Random();
-
-    @Override
-    public DenormaliserTargetField getTestObject() {
-      DenormaliserTargetField rtn = new DenormaliserTargetField();
-      rtn.setFieldName( UUID.randomUUID().toString() );
-      rtn.setKeyValue( UUID.randomUUID().toString() );
-      rtn.setTargetCurrencySymbol( UUID.randomUUID().toString() );
-      rtn.setTargetGroupingSymbol( UUID.randomUUID().toString() );
-      rtn.setTargetName( UUID.randomUUID().toString() );
-      rtn.setTargetType( rand.nextInt( 7 ) );
-      rtn.setTargetPrecision( rand.nextInt( 9 ) );
-      rtn.setTargetNullString( UUID.randomUUID().toString() );
-      rtn.setTargetLength( rand.nextInt( 50 ) );
-      rtn.setTargetDecimalSymbol( UUID.randomUUID().toString() );
-      rtn.setTargetAggregationType( rand.nextInt( DenormaliserTargetField.typeAggrDesc.length ) );
-      return rtn;
-    }
-
-    @Override
-    public boolean validateTestObject( DenormaliserTargetField testObject, Object actual ) {
-      if ( !( actual instanceof DenormaliserTargetField ) ) {
-        return false;
-      }
-      DenormaliserTargetField another = (DenormaliserTargetField) actual;
-      return new EqualsBuilder()
-        .append( testObject.getFieldName(), another.getFieldName() )
-        .append( testObject.getKeyValue(), another.getKeyValue() )
-        .append( testObject.getTargetName(), another.getTargetName() )
-        .append( testObject.getTargetType(), another.getTargetType() )
-        .append( testObject.getTargetLength(), another.getTargetLength() )
-        .append( testObject.getTargetPrecision(), another.getTargetPrecision() )
-        .append( testObject.getTargetCurrencySymbol(), another.getTargetCurrencySymbol() )
-        .append( testObject.getTargetDecimalSymbol(), another.getTargetDecimalSymbol() )
-        .append( testObject.getTargetGroupingSymbol(), another.getTargetGroupingSymbol() )
-        .append( testObject.getTargetNullString(), another.getTargetNullString() )
-        .append( testObject.getTargetFormat(), another.getTargetFormat() )
-        .append( testObject.getTargetAggregationType(), another.getTargetAggregationType() )
-        .isEquals();
-    }
-  }
-
-}