Posted to commits@ambari.apache.org by mg...@apache.org on 2017/05/22 10:50:14 UTC

[2/4] ambari git commit: AMBARI-21033 Log Search use POJOs for input configuration (mgergely)
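
The refactoring pattern repeated throughout the hunks below: call sites stop reading string-keyed configuration maps (input.getStringValue("type"), input.getConfigs()) and instead use typed getters on the new input configuration POJOs. A minimal before/after sketch, assuming only the Input and InputDescriptor types that appear in this diff:

    // Before: untyped lookups against string-keyed maps
    String type = input.getStringValue("type");
    Map<String, Object> configs = input.getConfigs();

    // After: typed getters on the input configuration POJO
    InputDescriptor descriptor = input.getInputDescriptor();
    String typeFromDescriptor = descriptor.getType();
    Map<String, String> addFields = descriptor.getAddFields();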

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
index ba872f8..4d6c43b 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputManager.java
@@ -70,7 +70,7 @@ public class OutputManager {
     Input input = inputMarker.input;
 
     // Update the block with the context fields
-    for (Map.Entry<String, String> entry : input.getContextFields().entrySet()) {
+    for (Map.Entry<String, String> entry : input.getInputDescriptor().getAddFields().entrySet()) {
       if (jsonObj.get(entry.getKey()) == null || entry.getKey().equals("cluster") && "null".equals(jsonObj.get(entry.getKey()))) {
         jsonObj.put(entry.getKey(), entry.getValue());
       }
@@ -79,13 +79,13 @@ public class OutputManager {
     // TODO: Ideally most of the overrides should be configurable
 
     if (jsonObj.get("type") == null) {
-      jsonObj.put("type", input.getStringValue("type"));
+      jsonObj.put("type", input.getInputDescriptor().getType());
     }
     if (jsonObj.get("path") == null && input.getFilePath() != null) {
       jsonObj.put("path", input.getFilePath());
     }
-    if (jsonObj.get("path") == null && input.getStringValue("path") != null) {
-      jsonObj.put("path", input.getStringValue("path"));
+    if (jsonObj.get("path") == null && input.getInputDescriptor().getPath() != null) {
+      jsonObj.put("path", input.getInputDescriptor().getPath());
     }
     if (jsonObj.get("host") == null && LogFeederUtil.hostName != null) {
       jsonObj.put("host", LogFeederUtil.hostName);
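
A note on the unchanged condition at the top of this hunk: in Java, && binds tighter than ||, so an add_fields value is applied either when the field is missing from the JSON block, or when the field is "cluster" and its current value is the literal string "null". Written with explicit parentheses (a clarifying sketch, not part of the commit):

    boolean applyContextField =
        jsonObj.get(entry.getKey()) == null
            || (entry.getKey().equals("cluster") && "null".equals(jsonObj.get(entry.getKey())));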

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
index d0f51b2..076d12d 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/output/OutputS3File.java
@@ -19,9 +19,6 @@
 package org.apache.ambari.logfeeder.output;
 
 import com.google.common.annotations.VisibleForTesting;
-import com.google.gson.Gson;
-import com.google.gson.GsonBuilder;
-import org.apache.ambari.logfeeder.common.ConfigHandler;
 import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.InputMarker;
@@ -31,11 +28,18 @@ import org.apache.ambari.logfeeder.output.spool.RolloverCondition;
 import org.apache.ambari.logfeeder.output.spool.RolloverHandler;
 import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logfeeder.util.S3Util;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputConfigGson;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputConfigImpl;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputS3FileDescriptorImpl;
 import org.apache.log4j.Logger;
 
 import java.io.File;
-import java.util.*;
-import java.util.Map.Entry;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Map;
 
 
 /**
@@ -50,7 +54,6 @@ import java.util.Map.Entry;
 public class OutputS3File extends Output implements RolloverCondition, RolloverHandler {
   private static final Logger LOG = Logger.getLogger(OutputS3File.class);
 
-  public static final String INPUT_ATTRIBUTE_TYPE = "type";
   public static final String GLOBAL_CONFIG_S3_PATH_SUFFIX = "global.config.json";
 
   private LogSpooler logSpooler;
@@ -72,9 +75,9 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
    */
   @Override
   public void copyFile(File inputFile, InputMarker inputMarker) {
-    String type = inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE);
+    String type = inputMarker.input.getInputDescriptor().getType();
     S3Uploader s3Uploader = new S3Uploader(s3OutputConfiguration, false, type);
-    String resolvedPath = s3Uploader.uploadFile(inputFile, inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE));
+    String resolvedPath = s3Uploader.uploadFile(inputFile, inputMarker.input.getInputDescriptor().getType());
 
     uploadConfig(inputMarker, type, s3OutputConfiguration, resolvedPath);
   }
@@ -82,43 +85,43 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
   private void uploadConfig(InputMarker inputMarker, String type, S3OutputConfiguration s3OutputConfiguration,
       String resolvedPath) {
 
-    ArrayList<Map<String, Object>> filters = new ArrayList<>();
+    ArrayList<FilterDescriptor> filters = new ArrayList<>();
     addFilters(filters, inputMarker.input.getFirstFilter());
-    Map<String, Object> inputConfig = new HashMap<>();
-    inputConfig.putAll(inputMarker.input.getConfigs());
+    InputS3FileDescriptor inputS3FileDescriptorOriginal = (InputS3FileDescriptor) inputMarker.input.getInputDescriptor();
+    InputS3FileDescriptorImpl inputS3FileDescriptor = InputConfigGson.gson.fromJson(
+        InputConfigGson.gson.toJson(inputS3FileDescriptorOriginal), InputS3FileDescriptorImpl.class);
     String s3CompletePath = LogFeederConstants.S3_PATH_START_WITH + s3OutputConfiguration.getS3BucketName() +
         LogFeederConstants.S3_PATH_SEPARATOR + resolvedPath;
-    inputConfig.put("path", s3CompletePath);
+    inputS3FileDescriptor.setPath(s3CompletePath);
 
-    ArrayList<Map<String, Object>> inputConfigList = new ArrayList<>();
-    inputConfigList.add(inputConfig);
+    ArrayList<InputDescriptorImpl> inputConfigList = new ArrayList<>();
+    inputConfigList.add(inputS3FileDescriptor);
     // set source s3_file
-    // remove global config from filter config
-    removeGlobalConfig(inputConfigList);
-    removeGlobalConfig(filters);
+    // remove global config from input config
+    removeS3GlobalConfig(inputS3FileDescriptor);
     // write config into s3 file
-    Map<String, Object> config = new HashMap<>();
-    config.put("filter", filters);
-    config.put("input", inputConfigList);
-    writeConfigToS3(config, getComponentConfigFileName(type), s3OutputConfiguration);
+    InputConfigImpl inputConfig = new InputConfigImpl();
+    inputConfig.setInput(inputConfigList);
+    
+    writeConfigToS3(inputConfig, getComponentConfigFileName(type), s3OutputConfiguration);
     // write global config
     writeGlobalConfig(s3OutputConfiguration);
   }
 
-  private void addFilters(ArrayList<Map<String, Object>> filters, Filter filter) {
+  private void addFilters(ArrayList<FilterDescriptor> filters, Filter filter) {
     if (filter != null) {
-      Map<String, Object> filterConfig = new HashMap<String, Object>();
-      filterConfig.putAll(filter.getConfigs());
-      filters.add(filterConfig);
+      FilterDescriptor filterDescriptorOriginal = filter.getFilterDescriptor();
+      FilterDescriptor filterDescriptor = InputConfigGson.gson.fromJson(
+          InputConfigGson.gson.toJson(filterDescriptorOriginal), filterDescriptorOriginal.getClass());
+      filters.add(filterDescriptor);
       if (filter.getNextFilter() != null) {
         addFilters(filters, filter.getNextFilter());
       }
     }
   }
 
-  private void writeConfigToS3(Map<String, Object> configToWrite, String s3KeySuffix, S3OutputConfiguration s3OutputConfiguration) {
-    Gson gson = new GsonBuilder().setPrettyPrinting().create();
-    String configJson = gson.toJson(configToWrite);
+  private void writeConfigToS3(Object config, String s3KeySuffix, S3OutputConfiguration s3OutputConfiguration) {
+    String configJson = InputConfigGson.gson.toJson(config);
 
     String s3ResolvedKey = new S3LogPathResolver().getResolvedPath(getStringValue("s3_config_dir"), s3KeySuffix,
         s3OutputConfiguration.getCluster());
@@ -131,31 +134,14 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
     return "input.config-" + componentName + ".json";
   }
 
-
-  private Map<String, Object> getGlobalConfig() {
-    Map<String, Object> globalConfig = ConfigHandler.globalConfigs;
-    if (globalConfig == null) {
-      globalConfig = new HashMap<>();
-    }
-    return globalConfig;
-  }
-
-  private void removeGlobalConfig(List<Map<String, Object>> configList) {
-    Map<String, Object> globalConfig = getGlobalConfig();
-    if (configList != null && globalConfig != null) {
-      for (Entry<String, Object> globalConfigEntry : globalConfig.entrySet()) {
-        if (globalConfigEntry != null) {
-          String globalKey = globalConfigEntry.getKey();
-          if (globalKey != null && !globalKey.trim().isEmpty()) {
-            for (Map<String, Object> config : configList) {
-              if (config != null) {
-                config.remove(globalKey);
-              }
-            }
-          }
-        }
-      }
-    }
+  private void removeS3GlobalConfig(InputS3FileDescriptorImpl inputS3FileDescriptor) {
+    inputS3FileDescriptor.setSource(null);
+    inputS3FileDescriptor.setCopyFile(null);
+    inputS3FileDescriptor.setProcessFile(null);
+    inputS3FileDescriptor.setTail(null);
+    inputS3FileDescriptor.getAddFields().remove("ip");
+    inputS3FileDescriptor.getAddFields().remove("host");
+    inputS3FileDescriptor.getAddFields().remove("bundle_id");
   }
 
   /**
@@ -164,7 +150,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
   @SuppressWarnings("unchecked")
   private synchronized void writeGlobalConfig(S3OutputConfiguration s3OutputConfiguration) {
     if (!uploadedGlobalConfig) {
-      Map<String, Object> globalConfig = LogFeederUtil.cloneObject(getGlobalConfig());
+      Map<String, Object> globalConfig = new HashMap<>();
       //updating global config before write to s3
       globalConfig.put("source", "s3_file");
       globalConfig.put("copy_file", false);
@@ -205,7 +191,7 @@ public class OutputS3File extends Output implements RolloverCondition, RolloverH
   public void write(String block, InputMarker inputMarker) throws Exception {
     if (logSpooler == null) {
       logSpooler = createSpooler(inputMarker.input.getFilePath());
-      s3Uploader = createUploader(inputMarker.input.getStringValue(INPUT_ATTRIBUTE_TYPE));
+      s3Uploader = createUploader(inputMarker.input.getInputDescriptor().getType());
     }
     logSpooler.add(block);
   }
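
One detail worth noting in the uploadConfig() hunk above: the descriptor is pushed through a Gson serialize/deserialize round trip before being modified, which presumably produces a copy so that setting the S3 path and stripping the S3-global fields does not mutate the live input configuration. A condensed sketch of that pattern, using the types imported above (not the exact method body):

    // Clone the descriptor via a Gson round trip, then mutate only the copy.
    InputS3FileDescriptor original = (InputS3FileDescriptor) inputMarker.input.getInputDescriptor();
    InputS3FileDescriptorImpl copy = InputConfigGson.gson.fromJson(
        InputConfigGson.gson.toJson(original), InputS3FileDescriptorImpl.class);
    copy.setPath(s3CompletePath);   // point the uploaded config at the S3 location
    removeS3GlobalConfig(copy);     // drop fields that belong in the global config file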

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
index 1929178..d8a1fbb 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/main/java/org/apache/ambari/logfeeder/util/LogFeederUtil.java
@@ -27,13 +27,11 @@ import java.net.InetAddress;
 import java.net.UnknownHostException;
 import java.util.HashMap;
 import java.util.Hashtable;
-import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
 import org.apache.ambari.logfeeder.LogFeeder;
 import org.apache.ambari.logfeeder.metrics.MetricData;
-import org.apache.commons.collections.MapUtils;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -205,55 +203,6 @@ public class LogFeederUtil {
     return retValue;
   }
 
-  @SuppressWarnings("unchecked")
-  public static boolean isEnabled(Map<String, Object> conditionConfigs, Map<String, Object> valueConfigs) {
-    Map<String, Object> conditions = (Map<String, Object>) conditionConfigs.get("conditions");
-    if (MapUtils.isEmpty(conditions)) {
-      return toBoolean((String) valueConfigs.get("is_enabled"), true);
-    }
-    
-    for (String conditionType : conditions.keySet()) {
-      if (!conditionType.equalsIgnoreCase("fields")) {
-        continue;
-      }
-      
-      Map<String, Object> fields = (Map<String, Object>) conditions.get("fields");
-      for (Map.Entry<String, Object> field : fields.entrySet()) {
-        if (field.getValue() instanceof String) {
-          if (isFieldConditionMatch(valueConfigs, field.getKey(), (String) field.getValue())) {
-            return true;
-          }
-        } else {
-          for (String stringValue : (List<String>) field.getValue()) {
-            if (isFieldConditionMatch(valueConfigs, field.getKey(), stringValue)) {
-              return true;
-            }
-          }
-        }
-      }
-    }
-    
-    return false;
-  }
-
-  private static boolean isFieldConditionMatch(Map<String, Object> configs, String fieldName, String stringValue) {
-    boolean allow = false;
-    String fieldValue = (String) configs.get(fieldName);
-    if (fieldValue != null && fieldValue.equalsIgnoreCase(stringValue)) {
-      allow = true;
-    } else {
-      @SuppressWarnings("unchecked")
-      Map<String, Object> addFields = (Map<String, Object>) configs.get("add_fields");
-      if (addFields != null && addFields.get(fieldName) != null) {
-        String addFieldValue = (String) addFields.get(fieldName);
-        if (stringValue.equalsIgnoreCase(addFieldValue)) {
-          allow = true;
-        }
-      }
-    }
-    return allow;
-  }
-
   public static void logStatForMetric(MetricData metric, String prefixStr, String postFix) {
     long currStat = metric.value;
     long currMS = System.currentTimeMillis();

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
index 99565c5..8d7e86c 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterGrokTest.java
@@ -18,12 +18,13 @@
 
 package org.apache.ambari.logfeeder.filter;
 
-import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.output.OutputManager;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.FilterGrokDescriptorImpl;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -43,12 +44,12 @@ public class FilterGrokTest {
   private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
-  public void init(Map<String, Object> config) throws Exception {
+  public void init(FilterGrokDescriptor filterGrokDescriptor) throws Exception {
     mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterGrok = new FilterGrok();
-    filterGrok.loadConfig(config);
+    filterGrok.loadConfig(filterGrokDescriptor);
     filterGrok.setOutputManager(mockOutputManager);
     filterGrok.setInput(EasyMock.mock(Input.class));
     filterGrok.init();
@@ -58,10 +59,10 @@ public class FilterGrokTest {
   public void testFilterGrok_parseMessage() throws Exception {
     LOG.info("testFilterGrok_parseMessage()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
-    config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
-    init(config);
+    FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl();
+    filterGrokDescriptor.setMessagePattern("(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})");
+    init(filterGrokDescriptor);
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
@@ -84,10 +85,10 @@ public class FilterGrokTest {
   public void testFilterGrok_parseMultiLineMessage() throws Exception {
     LOG.info("testFilterGrok_parseMultiLineMessage()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
-    config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
-    init(config);
+    FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl();
+    filterGrokDescriptor.setMessagePattern("(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})");
+    init(filterGrokDescriptor);
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
@@ -114,10 +115,10 @@ public class FilterGrokTest {
   public void testFilterGrok_notMatchingMesagePattern() throws Exception {
     LOG.info("testFilterGrok_notMatchingMesagePattern()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("message_pattern", "(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
-    config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
-    init(config);
+    FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl();
+    filterGrokDescriptor.setMessagePattern("(?m)^%{TIMESTAMP_ISO8601:logtime}%{SPACE}%{LOGLEVEL:level}%{SPACE}%{GREEDYDATA:log_message}");
+    filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})");
+    init(filterGrokDescriptor);
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
@@ -134,9 +135,9 @@ public class FilterGrokTest {
   public void testFilterGrok_noMesagePattern() throws Exception {
     LOG.info("testFilterGrok_noMesagePattern()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("multiline_pattern", "^(%{TIMESTAMP_ISO8601:logtime})");
-    init(config);
+    FilterGrokDescriptorImpl filterGrokDescriptor = new FilterGrokDescriptorImpl();
+    filterGrokDescriptor.setMultilinePattern("^(%{TIMESTAMP_ISO8601:logtime})");
+    init(filterGrokDescriptor);
 
     EasyMock.replay(mockOutputManager);
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
index 643dafc..8f75c3a 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterJSONTest.java
@@ -21,7 +21,6 @@ package org.apache.ambari.logfeeder.filter;
 import java.text.DateFormat;
 import java.text.SimpleDateFormat;
 import java.util.Date;
-import java.util.HashMap;
 import java.util.Map;
 import java.util.TimeZone;
 
@@ -29,6 +28,7 @@ import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.common.LogfeederException;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.output.OutputManager;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.FilterJsonDescriptorImpl;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
 import org.easymock.CaptureType;
@@ -47,12 +47,12 @@ public class FilterJSONTest {
   private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
-  public void init(Map<String, Object> params) throws Exception {
+  public void init(FilterJsonDescriptorImpl filterJsonDescriptor) throws Exception {
     mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterJson = new FilterJSON();
-    filterJson.loadConfig(params);
+    filterJson.loadConfig(filterJsonDescriptor);
     filterJson.setOutputManager(mockOutputManager);
     filterJson.init();
   }
@@ -61,7 +61,7 @@ public class FilterJSONTest {
   public void testJSONFilterCode_convertFields() throws Exception {
     LOG.info("testJSONFilterCode_convertFields()");
 
-    init(new HashMap<String, Object>());
+    init(new FilterJsonDescriptorImpl());
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
@@ -86,7 +86,7 @@ public class FilterJSONTest {
   public void testJSONFilterCode_logTimeOnly() throws Exception {
     LOG.info("testJSONFilterCode_logTimeOnly()");
 
-    init(new HashMap<String, Object>());
+    init(new FilterJsonDescriptorImpl());
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
@@ -111,7 +111,7 @@ public class FilterJSONTest {
   public void testJSONFilterCode_lineNumberOnly() throws Exception {
     LOG.info("testJSONFilterCode_lineNumberOnly()");
 
-    init(new HashMap<String, Object>());
+    init(new FilterJsonDescriptorImpl());
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
@@ -131,7 +131,7 @@ public class FilterJSONTest {
   @Test
   public void testJSONFilterCode_invalidJson() throws Exception {
     LOG.info("testJSONFilterCode_invalidJson()");
-    init(new HashMap<String, Object>());
+    init(new FilterJsonDescriptorImpl());
     String inputStr="invalid json";
     try{
     filterJson.apply(inputStr,new InputMarker(null, null, 0));

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
index 05647e6..ae978fb 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/filter/FilterKeyValueTest.java
@@ -18,10 +18,11 @@
 
 package org.apache.ambari.logfeeder.filter;
 
-import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.output.OutputManager;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.FilterKeyValueDescriptorImpl;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.log4j.Logger;
 import org.easymock.Capture;
@@ -41,12 +42,12 @@ public class FilterKeyValueTest {
   private OutputManager mockOutputManager;
   private Capture<Map<String, Object>> capture;
 
-  public void init(Map<String, Object> config) throws Exception {
+  public void init(FilterKeyValueDescriptor filterKeyValueDescriptor) throws Exception {
     mockOutputManager = EasyMock.strictMock(OutputManager.class);
     capture = EasyMock.newCapture(CaptureType.LAST);
 
     filterKeyValue = new FilterKeyValue();
-    filterKeyValue.loadConfig(config);
+    filterKeyValue.loadConfig(filterKeyValueDescriptor);
     filterKeyValue.setOutputManager(mockOutputManager);
     filterKeyValue.init();
   }
@@ -55,11 +56,10 @@ public class FilterKeyValueTest {
   public void testFilterKeyValue_extraction() throws Exception {
     LOG.info("testFilterKeyValue_extraction()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("source_field", "keyValueField");
-    config.put("field_split", "&");
-    // using default value split:
-    init(config);
+    FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl();
+    filterKeyValueDescriptor.setSourceField("keyValueField");
+    filterKeyValueDescriptor.setFieldSplit("&");
+    init(filterKeyValueDescriptor);
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
@@ -80,11 +80,11 @@ public class FilterKeyValueTest {
   public void testFilterKeyValue_extractionWithBorders() throws Exception {
     LOG.info("testFilterKeyValue_extractionWithBorders()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("source_field", "keyValueField");
-    config.put("field_split", "&");
-    config.put("value_borders", "()");
-    init(config);
+    FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl();
+    filterKeyValueDescriptor.setSourceField("keyValueField");
+    filterKeyValueDescriptor.setFieldSplit("&");
+    filterKeyValueDescriptor.setValueBorders("()");
+    init(filterKeyValueDescriptor);
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall();
@@ -105,10 +105,9 @@ public class FilterKeyValueTest {
   public void testFilterKeyValue_missingSourceField() throws Exception {
     LOG.info("testFilterKeyValue_missingSourceField()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("field_split", "&");
-    // using default value split: =
-    init(config);
+    FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl();
+    filterKeyValueDescriptor.setFieldSplit("&");
+    init(filterKeyValueDescriptor);
 
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));
     EasyMock.expectLastCall().anyTimes();
@@ -124,10 +123,10 @@ public class FilterKeyValueTest {
   public void testFilterKeyValue_noSourceFieldPresent() throws Exception {
     LOG.info("testFilterKeyValue_noSourceFieldPresent()");
 
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("source_field", "keyValueField");
-    config.put("field_split", "&");
-    init(config);
+    FilterKeyValueDescriptorImpl filterKeyValueDescriptor = new FilterKeyValueDescriptorImpl();
+    filterKeyValueDescriptor.setSourceField("keyValueField");
+    filterKeyValueDescriptor.setFieldSplit("&");
+    init(filterKeyValueDescriptor);
 
     // using default value split: =
     mockOutputManager.write(EasyMock.capture(capture), EasyMock.anyObject(InputMarker.class));

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
index 522f6bb..3a5f31e 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/input/InputFileTest.java
@@ -22,12 +22,11 @@ import java.io.File;
 import java.io.IOException;
 import java.nio.charset.Charset;
 import java.util.ArrayList;
-import java.util.HashMap;
 import java.util.List;
-import java.util.Map;
 
 import org.apache.ambari.logfeeder.filter.Filter;
 import org.apache.ambari.logfeeder.input.InputMarker;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputFileDescriptorImpl;
 import org.apache.commons.io.FileUtils;
 import org.apache.log4j.Logger;
 import org.easymock.EasyMock;
@@ -78,15 +77,14 @@ public class InputFileTest {
   }
 
   public void init(String path) throws Exception {
-    Map<String, Object> config = new HashMap<String, Object>();
-    config.put("source", "file");
-    config.put("tail", "true");
-    config.put("gen_event_md5", "true");
-    config.put("start_position", "beginning");
-
-    config.put("type", "hdfs_datanode");
-    config.put("rowtype", "service");
-    config.put("path", path);
+    InputFileDescriptorImpl inputFileDescriptor = new InputFileDescriptorImpl();
+    inputFileDescriptor.setSource("file");
+    inputFileDescriptor.setTail(true);
+    inputFileDescriptor.setGenEventMd5(true);
+    inputFileDescriptor.setStartPosition("beginning");
+    inputFileDescriptor.setType("hdfs_datanode");
+    inputFileDescriptor.setRowtype("service");
+    inputFileDescriptor.setPath(path);
 
     Filter capture = new Filter() {
       @Override
@@ -104,7 +102,7 @@ public class InputFileTest {
     };
 
     inputFile = new InputFile();
-    inputFile.loadConfig(config);
+    inputFile.loadConfig(inputFileDescriptor);
     inputFile.addFilter(capture);
     inputFile.init();
   }

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
index 44314c6..4123dad 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/logconfig/LogConfigHandlerTest.java
@@ -21,13 +21,10 @@ package org.apache.ambari.logfeeder.logconfig;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.Date;
-import java.util.HashMap;
-import java.util.Map;
 
 import static org.easymock.EasyMock.*;
 import static org.junit.Assert.*;
 
-import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.loglevelfilter.FilterLogData;
@@ -36,6 +33,7 @@ import org.apache.ambari.logfeeder.util.LogFeederUtil;
 import org.apache.ambari.logsearch.config.api.LogSearchConfig;
 import org.apache.ambari.logsearch.config.api.model.loglevelfilter.LogLevelFilter;
 import org.apache.commons.lang.time.DateUtils;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
@@ -44,16 +42,18 @@ public class LogConfigHandlerTest {
   private static InputMarker inputMarkerAudit;
   private static InputMarker inputMarkerService;
   static {
-    Map<String, Object> auditMap = new HashMap<String, Object>();
-    auditMap.put(LogFeederConstants.ROW_TYPE, "audit");
+    InputDescriptorImpl auditInputDescriptor = new InputDescriptorImpl() {};
+    auditInputDescriptor.setRowtype("audit");
+    
     Input auditInput = strictMock(Input.class);
-    expect(auditInput.getConfigs()).andReturn(auditMap).anyTimes();
+    expect(auditInput.getInputDescriptor()).andReturn(auditInputDescriptor).anyTimes();
     inputMarkerAudit = new InputMarker(auditInput, null, 0);
     
-    Map<String, Object> serviceMap = new HashMap<String, Object>();
-    serviceMap.put(LogFeederConstants.ROW_TYPE, "service");
+    InputDescriptorImpl serviceInputDescriptor = new InputDescriptorImpl() {};
+    serviceInputDescriptor.setRowtype("service");
+    
     Input serviceInput = strictMock(Input.class);
-    expect(serviceInput.getConfigs()).andReturn(serviceMap).anyTimes();
+    expect(serviceInput.getInputDescriptor()).andReturn(serviceInputDescriptor).anyTimes();
     inputMarkerService = new InputMarker(serviceInput, null, 0);
     
     replay(auditInput, serviceInput);
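
The updated tests build descriptors through an anonymous subclass (new InputDescriptorImpl() {}), which suggests the base implementation is abstract while the concrete per-source descriptors (InputFileDescriptorImpl, InputS3FileDescriptorImpl, etc.) carry the source-specific fields; that is an assumption, not something stated in the commit. The resulting mock setup, sketched:

    // Assumption: InputDescriptorImpl is abstract, hence the anonymous subclass.
    InputDescriptorImpl descriptor = new InputDescriptorImpl() {};
    descriptor.setRowtype("service");

    Input input = strictMock(Input.class);
    expect(input.getInputDescriptor()).andReturn(descriptor).anyTimes();
    replay(input);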

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
index 8beecda..0a0a9fd 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperDateTest.java
@@ -25,6 +25,7 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.ambari.logfeeder.common.LogFeederConstants;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.MapDateDescriptorImpl;
 import org.apache.commons.lang3.time.DateUtils;
 import org.apache.log4j.Logger;
 import org.junit.Test;
@@ -40,11 +41,11 @@ public class MapperDateTest {
   public void testMapperDate_epoch() {
     LOG.info("testMapperDate_epoch()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", "epoch");
+    MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl();
+    mapDateDescriptor.setTargetDatePattern("epoch");
 
     MapperDate mapperDate = new MapperDate();
-    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
 
@@ -61,11 +62,11 @@ public class MapperDateTest {
   public void testMapperDate_pattern() throws Exception {
     LOG.info("testMapperDate_pattern()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
+    MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl();
+    mapDateDescriptor.setTargetDatePattern("yyyy-MM-dd HH:mm:ss.SSS");
 
     MapperDate mapperDate = new MapperDate();
-    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
     String dateString = "2016-04-08 15:55:23.548";
@@ -80,44 +81,35 @@ public class MapperDateTest {
   }
 
   @Test
-  public void testMapperDate_configNotMap() {
-    LOG.info("testMapperDate_configNotMap()");
-
-    MapperDate mapperDate = new MapperDate();
-    assertFalse("Was able to initialize!", mapperDate.init(null, "someField", null, ""));
-  }
-
-  @Test
   public void testMapperDate_noDatePattern() {
     LOG.info("testMapperDate_noDatePattern()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("some_param", "some_value");
+    MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl();
 
     MapperDate mapperDate = new MapperDate();
-    assertFalse("Was able to initialize!", mapperDate.init(null, "someField", null, mapConfigs));
+    assertFalse("Was not able to initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor));
   }
 
   @Test
   public void testMapperDate_notParsableDatePattern() {
     LOG.info("testMapperDate_notParsableDatePattern()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", "not_parsable_content");
+    MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl();
+    mapDateDescriptor.setTargetDatePattern("not_parsable_content");
 
     MapperDate mapperDate = new MapperDate();
-    assertFalse("Was able to initialize!", mapperDate.init(null, "someField", null, mapConfigs));
+    assertFalse("Was not able to initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor));
   }
 
   @Test
   public void testMapperDate_invalidEpochValue() {
     LOG.info("testMapperDate_invalidEpochValue()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", "epoch");
+    MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl();
+    mapDateDescriptor.setTargetDatePattern("epoch");
 
     MapperDate mapperDate = new MapperDate();
-    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
     String invalidValue = "abc";
@@ -131,11 +123,11 @@ public class MapperDateTest {
   public void testMapperDate_invalidDateStringValue() {
     LOG.info("testMapperDate_invalidDateStringValue()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("target_date_pattern", "yyyy-MM-dd HH:mm:ss.SSS");
+    MapDateDescriptorImpl mapDateDescriptor = new MapDateDescriptorImpl();
+    mapDateDescriptor.setTargetDatePattern("yyyy-MM-dd HH:mm:ss.SSS");
 
     MapperDate mapperDate = new MapperDate();
-    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperDate.init(null, "someField", null, mapDateDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
     String invalidValue = "abc";

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java
index 108c96e..4899dfc 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldCopyTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.logfeeder.mapper;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.MapFieldCopyDescriptorImpl;
 import org.apache.log4j.Logger;
 import org.junit.Test;
 
@@ -35,11 +36,11 @@ public class MapperFieldCopyTest {
   public void testMapperFieldCopy_copyField() {
     LOG.info("testMapperFieldCopy_copyField()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("copy_name", "someOtherField");
+    MapFieldCopyDescriptorImpl mapFieldCopyDescriptor = new MapFieldCopyDescriptorImpl();
+    mapFieldCopyDescriptor.setCopyName("someOtherField");
 
     MapperFieldCopy mapperFieldCopy = new MapperFieldCopy();
-    assertTrue("Could not initialize!", mapperFieldCopy.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperFieldCopy.init(null, "someField", null, mapFieldCopyDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
     jsonObj.put("someField", "someValue");
@@ -52,20 +53,12 @@ public class MapperFieldCopyTest {
   }
 
   @Test
-  public void testMapperFielCopy_configNotMap() {
-    LOG.info("testMapperFieldCopy_configNotMap()");
-
-    MapperFieldCopy mapperFieldCopy = new MapperFieldCopy();
-    assertFalse("Was able to initialize!", mapperFieldCopy.init(null, "someField", null, ""));
-  }
-
-  @Test
   public void testMapperFieldCopy_noNewFieldName() {
     LOG.info("testMapperFieldCopy_noNewFieldName()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
+    MapFieldCopyDescriptorImpl mapFieldCopyDescriptor = new MapFieldCopyDescriptorImpl();
 
     MapperFieldCopy mapperFieldCopy = new MapperFieldCopy();
-    assertFalse("Was able to initialize!", mapperFieldCopy.init(null, "someField", null, mapConfigs));
+    assertFalse("Was not able to initialize!", mapperFieldCopy.init(null, "someField", null, mapFieldCopyDescriptor));
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
index 8ecaad1..74b88fc 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldNameTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.logfeeder.mapper;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.MapFieldNameDescriptorImpl;
 import org.apache.log4j.Logger;
 import org.junit.Test;
 
@@ -35,11 +36,11 @@ public class MapperFieldNameTest {
   public void testMapperFieldName_replaceField() {
     LOG.info("testMapperFieldName_replaceField()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("new_fieldname", "someOtherField");
+    MapFieldNameDescriptorImpl mapFieldNameDescriptor = new MapFieldNameDescriptorImpl();
+    mapFieldNameDescriptor.setNewFieldName("someOtherField");
 
     MapperFieldName mapperFieldName = new MapperFieldName();
-    assertTrue("Could not initialize!", mapperFieldName.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperFieldName.init(null, "someField", null, mapFieldNameDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
     jsonObj.put("someField", "someValue");
@@ -52,20 +53,12 @@ public class MapperFieldNameTest {
   }
 
   @Test
-  public void testMapperFieldName_configNotMap() {
-    LOG.info("testMapperFieldName_configNotMap()");
-
-    MapperFieldName mapperFieldName = new MapperFieldName();
-    assertFalse("Was able to initialize!", mapperFieldName.init(null, "someField", null, ""));
-  }
-
-  @Test
   public void testMapperFieldName_noNewFieldName() {
     LOG.info("testMapperFieldName_noNewFieldName()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
+    MapFieldNameDescriptorImpl mapFieldNameDescriptor = new MapFieldNameDescriptorImpl();
 
     MapperFieldName mapperFieldName = new MapperFieldName();
-    assertFalse("Was able to initialize!", mapperFieldName.init(null, "someField", null, mapConfigs));
+    assertFalse("Was able to initialize!", mapperFieldName.init(null, "someField", null, mapFieldNameDescriptor));
   }
 }

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
index fce4308..1a33740 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/mapper/MapperFieldValueTest.java
@@ -21,6 +21,7 @@ package org.apache.ambari.logfeeder.mapper;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.MapFieldValueDescriptorImpl;
 import org.apache.log4j.Logger;
 import org.junit.Test;
 
@@ -35,12 +36,12 @@ public class MapperFieldValueTest {
   public void testMapperFieldValue_replaceValue() {
     LOG.info("testMapperFieldValue_replaceValue()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("pre_value", "someValue");
-    mapConfigs.put("post_value", "someOtherValue");
+    MapFieldValueDescriptorImpl mapFieldValueDescriptor = new MapFieldValueDescriptorImpl();
+    mapFieldValueDescriptor.setPreValue("someValue");
+    mapFieldValueDescriptor.setPostValue("someOtherValue");
 
     MapperFieldValue mapperFieldValue = new MapperFieldValue();
-    assertTrue("Could not initialize!", mapperFieldValue.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperFieldValue.init(null, "someField", null, mapFieldValueDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
 
@@ -52,33 +53,25 @@ public class MapperFieldValueTest {
   }
 
   @Test
-  public void testMapperFieldValue_configNotMap() {
-    LOG.info("testMapperFieldValue_configNotMap()");
-
-    MapperFieldValue mapperFieldValue = new MapperFieldValue();
-    assertFalse("Was able to initialize!", mapperFieldValue.init(null, "someField", null, ""));
-  }
-
-  @Test
   public void testMapperFieldValue_noPostValue() {
     LOG.info("testMapperFieldValue_noPostValue()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
+    MapFieldValueDescriptorImpl mapFieldValueDescriptor = new MapFieldValueDescriptorImpl();
 
     MapperFieldValue mapperFieldValue = new MapperFieldValue();
-    assertFalse("Was able to initialize!", mapperFieldValue.init(null, "someField", null, mapConfigs));
+    assertFalse("Was not able to initialize!", mapperFieldValue.init(null, "someField", null, mapFieldValueDescriptor));
   }
 
   @Test
   public void testMapperFieldValue_noPreValueFound() {
     LOG.info("testMapperFieldValue_noPreValueFound()");
 
-    Map<String, Object> mapConfigs = new HashMap<>();
-    mapConfigs.put("pre_value", "someValue");
-    mapConfigs.put("post_value", "someOtherValue");
+    MapFieldValueDescriptorImpl mapFieldValueDescriptor = new MapFieldValueDescriptorImpl();
+    mapFieldValueDescriptor.setPreValue("someValue");
+    mapFieldValueDescriptor.setPostValue("someOtherValue");
 
     MapperFieldValue mapperFieldValue = new MapperFieldValue();
-    assertTrue("Could not initialize!", mapperFieldValue.init(null, "someField", null, mapConfigs));
+    assertTrue("Could not initialize!", mapperFieldValue.init(null, "someField", null, mapFieldValueDescriptor));
 
     Map<String, Object> jsonObj = new HashMap<>();
 

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java
index 1ccc319..6e108ab 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputLineFilterTest.java
@@ -21,6 +21,8 @@ package org.apache.ambari.logfeeder.output;
 import org.apache.ambari.logfeeder.common.LogFeederConstants;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.cache.LRUCache;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
 import org.easymock.EasyMock;
 import org.junit.Before;
 import org.junit.Test;
@@ -49,7 +51,7 @@ public class OutputLineFilterTest {
   public void testApplyWithFilterOutByDedupInterval() {
     // GIVEN
     EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 100L, false));
-    EasyMock.expect(inputMock.getConfigs()).andReturn(generateInputConfigs());
+    EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor());
     EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD);
     EasyMock.replay(inputMock);
     // WHEN
@@ -63,7 +65,7 @@ public class OutputLineFilterTest {
   public void testApplyDoNotFilterOutDataByDedupInterval() {
     // GIVEN
     EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 10L, false));
-    EasyMock.expect(inputMock.getConfigs()).andReturn(generateInputConfigs());
+    EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor());
     EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD);
     EasyMock.replay(inputMock);
     // WHEN
@@ -77,7 +79,7 @@ public class OutputLineFilterTest {
   public void testApplyWithFilterOutByDedupLast() {
     // GIVEN
     EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 10L, true));
-    EasyMock.expect(inputMock.getConfigs()).andReturn(generateInputConfigs());
+    EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor());
     EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD);
     EasyMock.replay(inputMock);
     // WHEN
@@ -91,7 +93,7 @@ public class OutputLineFilterTest {
   public void testApplyDoNotFilterOutDataByDedupLast() {
     // GIVEN
     EasyMock.expect(inputMock.getCache()).andReturn(createLruCache("myMessage2", 10L, true));
-    EasyMock.expect(inputMock.getConfigs()).andReturn(generateInputConfigs());
+    EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor());
     EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD);
     EasyMock.replay(inputMock);
     // WHEN
@@ -117,7 +119,7 @@ public class OutputLineFilterTest {
   public void testApplyWithoutInMemoryTimestamp() {
     // GIVEN
     EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 100L, true));
-    EasyMock.expect(inputMock.getConfigs()).andReturn(generateInputConfigs());
+    EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor());
     EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD);
     EasyMock.replay(inputMock);
     Map<String, Object> lineMap = generateLineMap();
@@ -133,7 +135,7 @@ public class OutputLineFilterTest {
   public void testApplyWithoutLogMessage() {
     // GIVEN
     EasyMock.expect(inputMock.getCache()).andReturn(createLruCache(DEFAULT_DUMMY_MESSAGE, 100L, true));
-    EasyMock.expect(inputMock.getConfigs()).andReturn(generateInputConfigs());
+    EasyMock.expect(inputMock.getInputDescriptor()).andReturn(generateInputDescriptor());
     EasyMock.expect(inputMock.getCacheKeyField()).andReturn(CACHE_KEY_FIELD);
     EasyMock.replay(inputMock);
     Map<String, Object> lineMap = generateLineMap();
@@ -152,10 +154,10 @@ public class OutputLineFilterTest {
     return lineMap;
   }
 
-  private Map<String, Object> generateInputConfigs() {
-    Map<String, Object> inputConfigs = new HashMap<>();
-    inputConfigs.put(LogFeederConstants.ROW_TYPE, "service");
-    return inputConfigs;
+  private InputDescriptor generateInputDescriptor() {
+    InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {};
+    inputDescriptor.setRowtype("service");
+    return inputDescriptor;
   }
 
   private LRUCache createLruCache(String defaultKey, long defaultValue, boolean lastDedupEanabled) {

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
index cf1d25a..5abb720 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputManagerTest.java
@@ -32,6 +32,7 @@ import java.util.Map;
 import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.metrics.MetricData;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
 import org.junit.Test;
 
 public class OutputManagerTest {
@@ -91,15 +92,17 @@ public class OutputManagerTest {
     
     Input mockInput = strictMock(Input.class);
     InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {};
+    inputDescriptor.setAddFields(Collections.<String, String> emptyMap());
     
     Output output1 = strictMock(Output.class);
     Output output2 = strictMock(Output.class);
     Output output3 = strictMock(Output.class);
     
-    expect(mockInput.getContextFields()).andReturn(Collections.<String, String> emptyMap());
+    expect(mockInput.getInputDescriptor()).andReturn(inputDescriptor);
     expect(mockInput.isUseEventMD5()).andReturn(false);
     expect(mockInput.isGenEventMD5()).andReturn(false);
-    expect(mockInput.getConfigs()).andReturn(Collections.<String, Object> emptyMap());
+    expect(mockInput.getInputDescriptor()).andReturn(inputDescriptor);
     expect(mockInput.getCache()).andReturn(null);
     expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
 
@@ -125,12 +128,13 @@ public class OutputManagerTest {
     
     Input mockInput = strictMock(Input.class);
     InputMarker inputMarker = new InputMarker(mockInput, null, 0);
+    InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {};
     
     Output output1 = strictMock(Output.class);
     Output output2 = strictMock(Output.class);
     Output output3 = strictMock(Output.class);
     
-    expect(mockInput.getConfigs()).andReturn(Collections.<String, Object> emptyMap());
+    expect(mockInput.getInputDescriptor()).andReturn(inputDescriptor);
     expect(mockInput.getOutputList()).andReturn(Arrays.asList(output1, output2, output3));
     
     output1.write(jsonString, inputMarker); expectLastCall();

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
index 1872135..7c6aca2 100644
--- a/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
+++ b/ambari-logsearch/ambari-logsearch-logfeeder/src/test/java/org/apache/ambari/logfeeder/output/OutputS3FileTest.java
@@ -22,6 +22,7 @@ import org.apache.ambari.logfeeder.input.Input;
 import org.apache.ambari.logfeeder.input.InputMarker;
 import org.apache.ambari.logfeeder.output.spool.LogSpooler;
 import org.apache.ambari.logfeeder.output.spool.LogSpoolerContext;
+import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -33,7 +34,6 @@ import static org.easymock.EasyMock.*;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
-
 public class OutputS3FileTest {
 
   private Map<String, Object> configMap;
@@ -71,8 +71,11 @@ public class OutputS3FileTest {
 
     Input input = mock(Input.class);
     InputMarker inputMarker = new InputMarker(input, null, 0);
+    InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {};
+    inputDescriptor.setType("hdfs-namenode");
+    
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
-    expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
+    expect(input.getInputDescriptor()).andReturn(inputDescriptor);
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block");
     final S3Uploader s3Uploader = mock(S3Uploader.class);
@@ -99,8 +102,11 @@ public class OutputS3FileTest {
   public void shouldReuseSpoolerForSamePath() throws Exception {
     Input input = mock(Input.class);
     InputMarker inputMarker = new InputMarker(input, null, 0);
+    InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {};
+    inputDescriptor.setType("hdfs-namenode");
+    
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
-    expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
+    expect(input.getInputDescriptor()).andReturn(inputDescriptor);
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block1");
     spooler.add("log event block2");
@@ -169,8 +175,11 @@ public class OutputS3FileTest {
   public void shouldUploadFileOnRollover() throws Exception {
     Input input = mock(Input.class);
     InputMarker inputMarker = new InputMarker(input, null, 0);
+    InputDescriptorImpl inputDescriptor = new InputDescriptorImpl() {};
+    inputDescriptor.setType("hdfs-namenode");
+    
     expect(input.getFilePath()).andReturn("/var/log/hdfs-namenode.log");
-    expect(input.getStringValue(OutputS3File.INPUT_ATTRIBUTE_TYPE)).andReturn("hdfs-namenode");
+    expect(input.getInputDescriptor()).andReturn(inputDescriptor);
     final LogSpooler spooler = mock(LogSpooler.class);
     spooler.add("log event block1");
     final S3Uploader s3Uploader = mock(S3Uploader.class);

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java
index 1118233..44d91a9 100644
--- a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/manager/ShipperConfigManager.java
@@ -21,7 +21,9 @@ package org.apache.ambari.logsearch.manager;
 
 import java.util.List;
 
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
 import org.apache.ambari.logsearch.configurer.LogSearchConfigConfigurer;
+import org.apache.ambari.logsearch.model.common.LSServerInputConfig;
 import org.apache.ambari.logsearch.model.common.LSServerLogLevelFilterMap;
 import org.apache.log4j.Logger;
 
@@ -50,8 +52,9 @@ public class ShipperConfigManager extends JsonManagerBase {
     return LogSearchConfigConfigurer.getConfig().getServices(clusterName);
   }
 
-  public String getInputConfig(String clusterName, String serviceName) {
-    return LogSearchConfigConfigurer.getConfig().getInputConfig(clusterName, serviceName);
+  public LSServerInputConfig getInputConfig(String clusterName, String serviceName) {
+    InputConfig inputConfig = LogSearchConfigConfigurer.getConfig().getInputConfig(clusterName, serviceName);
+    return new LSServerInputConfig(inputConfig);
   }
 
   public Response createInputConfig(String clusterName, String serviceName, String inputConfig) {
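
Not part of the patch: a rough sketch of what a caller gains from the typed return value. The shipperConfigManager instance, the logger, and the cluster/service names are placeholders; getInput(), getType() and getPath() come from the LSServerInputConfig and LSServerInput classes added below:

    // Sketch only: typed access to the shipper config instead of re-parsing a JSON string.
    LSServerInputConfig config = shipperConfigManager.getInputConfig("cl1", "ambari_audit");
    for (LSServerInput input : config.getInput()) {
      LOG.info("service input of type " + input.getType() + " reads " + input.getPath());
    }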

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerConditions.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerConditions.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerConditions.java
new file mode 100644
index 0000000..9cd9710
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerConditions.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.Conditions;
+
+import io.swagger.annotations.ApiModel;
+
+@ApiModel
+public class LSServerConditions {
+  private LSServerFields fields;
+  
+  public LSServerConditions(Conditions conditions) {
+    this.fields = new LSServerFields(conditions.getFields());
+  }
+
+  public LSServerFields getFields() {
+    return fields;
+  }
+
+  public void setFields(LSServerFields fields) {
+    this.fields = fields;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFields.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFields.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFields.java
new file mode 100644
index 0000000..5f570da
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFields.java
@@ -0,0 +1,43 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import java.util.Set;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.Fields;
+
+import io.swagger.annotations.ApiModel;
+
+@ApiModel
+public class LSServerFields {
+  private Set<String> type;
+  
+  public LSServerFields(Fields fields) {
+    this.type = fields.getType();
+  }
+
+  public Set<String> getType() {
+    return type;
+  }
+
+  public void setType(Set<String> type) {
+    this.type = type;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilter.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilter.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilter.java
new file mode 100644
index 0000000..0190c01
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilter.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.PostMapValues;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.swagger.annotations.ApiModel;
+
+@ApiModel
+@JsonInclude(Include.NON_NULL)
+public abstract class LSServerFilter {
+  private String filter;
+  
+  private LSServerConditions conditions;
+  
+  @JsonProperty("sort_order")
+  private Integer sortOrder;
+  
+  @JsonProperty("source_field")
+  private String sourceField;
+  
+  @JsonProperty("remove_source_field")
+  private Boolean removeSourceField;
+  
+  @JsonProperty("post_map_values")
+  private Map<String, List<LSServerPostMapValues>> postMapValues;
+  
+  @JsonProperty("is_enabled")
+  private Boolean isEnabled;
+
+  public LSServerFilter(FilterDescriptor filterDescriptor) {
+    this.filter = filterDescriptor.getFilter();
+    this.conditions = new LSServerConditions(filterDescriptor.getConditions());
+    this.sortOrder = filterDescriptor.getSortOrder();
+    this.sourceField = filterDescriptor.getSourceField();
+    this.removeSourceField = filterDescriptor.isRemoveSourceField();
+    
+    postMapValues = new HashMap<String, List<LSServerPostMapValues>>();
+    for (Map.Entry<String, ? extends List<? extends PostMapValues>> e : filterDescriptor.getPostMapValues().entrySet()) {
+      List<LSServerPostMapValues> lsServerPostMapValues = new ArrayList<>();
+      for (PostMapValues pmv : e.getValue()) {
+        lsServerPostMapValues.add(new LSServerPostMapValues(pmv));
+      }
+      postMapValues.put(e.getKey(), lsServerPostMapValues);
+    }
+    
+    this.isEnabled = filterDescriptor.isEnabled();
+  }
+
+  public String getFilter() {
+    return filter;
+  }
+
+  public void setFilter(String filter) {
+    this.filter = filter;
+  }
+
+  public LSServerConditions getConditions() {
+    return conditions;
+  }
+
+  public void setConditions(LSServerConditions conditions) {
+    this.conditions = conditions;
+  }
+
+  public Integer getSortOrder() {
+    return sortOrder;
+  }
+
+  public void setSortOrder(Integer sortOrder) {
+    this.sortOrder = sortOrder;
+  }
+
+  public String getSourceField() {
+    return sourceField;
+  }
+
+  public void setSourceField(String sourceField) {
+    this.sourceField = sourceField;
+  }
+
+  public Boolean getRemoveSourceField() {
+    return removeSourceField;
+  }
+
+  public void setRemoveSourceField(Boolean removeSourceField) {
+    this.removeSourceField = removeSourceField;
+  }
+
+  public Map<String, List<LSServerPostMapValues>> getPostMapValues() {
+    return postMapValues;
+  }
+
+  public void setPostMapValues(Map<String, List<LSServerPostMapValues>> postMapValues) {
+    this.postMapValues = postMapValues;
+  }
+
+  public Boolean getIsEnabled() {
+    return isEnabled;
+  }
+
+  public void setIsEnabled(Boolean isEnabled) {
+    this.isEnabled = isEnabled;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterGrok.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterGrok.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterGrok.java
new file mode 100644
index 0000000..a8c4a7a
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterGrok.java
@@ -0,0 +1,73 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.swagger.annotations.ApiModel;
+
+@ApiModel
+public class LSServerFilterGrok extends LSServerFilter {
+  @JsonProperty("log4j_format")
+  private String log4jFormat;
+
+  @JsonProperty("multiline_pattern")
+  private String multilinePattern;
+
+  @JsonProperty("message_pattern")
+  private String messagePattern;
+
+  public LSServerFilterGrok(FilterDescriptor filterDescriptor) {
+    super(filterDescriptor);
+    if (filterDescriptor instanceof FilterGrokDescriptor) {
+      FilterGrokDescriptor filterGrokDescriptor = (FilterGrokDescriptor)filterDescriptor;
+      this.log4jFormat = filterGrokDescriptor.getLog4jFormat();
+      this.multilinePattern = filterGrokDescriptor.getMultilinePattern();
+      this.messagePattern = filterGrokDescriptor.getMessagePattern();
+    }
+  }
+
+  public String getLog4jFormat() {
+    return log4jFormat;
+  }
+
+  public void setLog4jFormat(String log4jFormat) {
+    this.log4jFormat = log4jFormat;
+  }
+
+  public String getMultilinePattern() {
+    return multilinePattern;
+  }
+
+  public void setMultilinePattern(String multilinePattern) {
+    this.multilinePattern = multilinePattern;
+  }
+
+  public String getMessagePattern() {
+    return messagePattern;
+  }
+
+  public void setMessagePattern(String messagePattern) {
+    this.messagePattern = messagePattern;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterJson.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterJson.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterJson.java
new file mode 100644
index 0000000..3c0ed17
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterJson.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+
+import io.swagger.annotations.ApiModel;
+
+@ApiModel
+public class LSServerFilterJson extends LSServerFilter {
+  public LSServerFilterJson(FilterDescriptor filterDescriptor) {
+    super(filterDescriptor);
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterKeyValue.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterKeyValue.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterKeyValue.java
new file mode 100644
index 0000000..dcee25d
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerFilterKeyValue.java
@@ -0,0 +1,71 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor;
+
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.swagger.annotations.ApiModel;
+
+@ApiModel
+public class LSServerFilterKeyValue extends LSServerFilter {
+  @JsonProperty("field_split")
+  private String fieldSplit;
+
+  @JsonProperty("value_split")
+  private String valueSplit;
+
+  @JsonProperty("value_borders")
+  private String valueBorders;
+
+  public LSServerFilterKeyValue(FilterDescriptor filterDescriptor) {
+    super(filterDescriptor);
+    FilterKeyValueDescriptor filterKeyValueDescriptor = (FilterKeyValueDescriptor)filterDescriptor;
+    this.fieldSplit = filterKeyValueDescriptor.getFieldSplit();
+    this.valueSplit = filterKeyValueDescriptor.getValueSplit();
+    this.valueBorders = filterKeyValueDescriptor.getValueBorders();
+  }
+
+  public String getFieldSplit() {
+    return fieldSplit;
+  }
+
+  public void setFieldSplit(String fieldSplit) {
+    this.fieldSplit = fieldSplit;
+  }
+
+  public String getValueSplit() {
+    return valueSplit;
+  }
+
+  public void setValueSplit(String valueSplit) {
+    this.valueSplit = valueSplit;
+  }
+
+  public String getValueBorders() {
+    return valueBorders;
+  }
+
+  public void setValueBorders(String valueBorders) {
+    this.valueBorders = valueBorders;
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInput.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInput.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInput.java
new file mode 100644
index 0000000..fe83fe4
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInput.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import java.util.Map;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+
+import com.fasterxml.jackson.annotation.JsonInclude;
+import com.fasterxml.jackson.annotation.JsonInclude.Include;
+import com.fasterxml.jackson.annotation.JsonProperty;
+
+import io.swagger.annotations.ApiModel;
+
+@ApiModel
+@JsonInclude(Include.NON_NULL)
+public abstract class LSServerInput {
+  private final String type;
+  private final String rowtype;
+  private final String path;
+  
+  @JsonProperty("add_fields")
+  private final Map<String, String> addFields;
+  
+  private final String source;
+  private final Boolean tail;
+  
+  @JsonProperty("gen_event_md5")
+  private final Boolean genEventMd5;
+  
+  @JsonProperty("use_event_md5_as_id")
+  private final Boolean useEventMd5AsId;
+  
+  @JsonProperty("start_position")
+  private final String startPosition;
+  
+  @JsonProperty("cache_enabled")
+  private final Boolean cacheEnabled;
+  
+  @JsonProperty("cache_key_field")
+  private final String cacheKeyField;
+  
+  @JsonProperty("cache_last_dedup_enabled")
+  private final Boolean cacheLastDedupEnabled;
+  
+  @JsonProperty("cache_size")
+  private final Integer cacheSize;
+  
+  @JsonProperty("cache_dedup_interval")
+  private final Long cacheDedupInterval;
+  
+  @JsonProperty("is_enabled")
+  private final Boolean isEnabled;
+  
+  public LSServerInput(InputDescriptor inputDescriptor) {
+    this.type = inputDescriptor.getType();
+    this.rowtype = inputDescriptor.getRowtype();
+    this.path = inputDescriptor.getPath();
+    this.addFields = inputDescriptor.getAddFields();
+    this.source = inputDescriptor.getSource();
+    this.tail = inputDescriptor.isTail();
+    this.genEventMd5 = inputDescriptor.isGenEventMd5();
+    this.useEventMd5AsId = inputDescriptor.isUseEventMd5AsId();
+    this.startPosition = inputDescriptor.getStartPosition();
+    this.cacheEnabled = inputDescriptor.isCacheEnabled();
+    this.cacheKeyField = inputDescriptor.getCacheKeyField();
+    this.cacheLastDedupEnabled = inputDescriptor.getCacheLastDedupEnabled();
+    this.cacheSize = inputDescriptor.getCacheSize();
+    this.cacheDedupInterval = inputDescriptor.getCacheDedupInterval();
+    this.isEnabled = inputDescriptor.isEnabled();
+  }
+
+  public String getType() {
+    return type;
+  }
+
+  public String getRowtype() {
+    return rowtype;
+  }
+
+  public String getPath() {
+    return path;
+  }
+
+  public Map<String, String> getAddFields() {
+    return addFields;
+  }
+
+  public String getSource() {
+    return source;
+  }
+
+  public Boolean getTail() {
+    return tail;
+  }
+
+  public Boolean getGenEventMd5() {
+    return genEventMd5;
+  }
+
+  public Boolean getUseEventMd5AsId() {
+    return useEventMd5AsId;
+  }
+
+  public String getStartPosition() {
+    return startPosition;
+  }
+
+  public Boolean getCacheEnabled() {
+    return cacheEnabled;
+  }
+
+  public String getCacheKeyField() {
+    return cacheKeyField;
+  }
+
+  public Boolean getCacheLastDedupEnabled() {
+    return cacheLastDedupEnabled;
+  }
+
+  public Integer getCacheSize() {
+    return cacheSize;
+  }
+
+  public Long getCacheDedupInterval() {
+    return cacheDedupInterval;
+  }
+
+  public Boolean getIsEnabled() {
+    return isEnabled;
+  }
+}
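
Not part of the patch: because of the @JsonInclude(Include.NON_NULL) annotation above, only descriptor fields that are actually set end up in the serialized output. A self-contained sketch (the class name is made up), assuming Jackson databind on the classpath and that InputDescriptorImpl is abstract, hence the anonymous subclasses, as in the tests above:

    import org.apache.ambari.logsearch.config.zookeeper.model.inputconfig.impl.InputDescriptorImpl;
    import org.apache.ambari.logsearch.model.common.LSServerInput;

    import com.fasterxml.jackson.databind.ObjectMapper;

    public class LSServerInputJsonSketch {
      public static void main(String[] args) throws Exception {
        // Only type and rowtype are set; every other descriptor field stays null.
        InputDescriptorImpl descriptor = new InputDescriptorImpl() {};
        descriptor.setType("ambari_audit");
        descriptor.setRowtype("audit");

        // Anonymous subclass because LSServerInput is abstract.
        LSServerInput input = new LSServerInput(descriptor) {};

        // Include.NON_NULL drops the unset fields, so the output is roughly
        // {"type":"ambari_audit","rowtype":"audit"} rather than a block of nulls.
        System.out.println(new ObjectMapper().writeValueAsString(input));
      }
    }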

http://git-wip-us.apache.org/repos/asf/ambari/blob/fd4a7a46/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputConfig.java
----------------------------------------------------------------------
diff --git a/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputConfig.java b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputConfig.java
new file mode 100644
index 0000000..e3dc0d1
--- /dev/null
+++ b/ambari-logsearch/ambari-logsearch-server/src/main/java/org/apache/ambari/logsearch/model/common/LSServerInputConfig.java
@@ -0,0 +1,88 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * 
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * 
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.ambari.logsearch.model.common;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterGrokDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterJsonDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.FilterKeyValueDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputConfig;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputFileBaseDescriptor;
+import org.apache.ambari.logsearch.config.api.model.inputconfig.InputS3FileDescriptor;
+
+import io.swagger.annotations.ApiModel;
+import io.swagger.annotations.ApiModelProperty;
+
+@ApiModel
+public class LSServerInputConfig {
+  @ApiModelProperty
+  private List<LSServerInput> input;
+  
+  @ApiModelProperty
+  private List<LSServerFilter> filter;
+  
+  public LSServerInputConfig(InputConfig inputConfig) {
+    input = new ArrayList<>();
+    for (InputDescriptor inputDescriptor : inputConfig.getInput()) {
+      // Check InputS3FileDescriptor first: an S3 descriptor that also implements
+      // InputFileBaseDescriptor must not be mapped as a plain file input.
+      if (inputDescriptor instanceof InputS3FileDescriptor) {
+        LSServerInput inputItem = new LSServerInputS3File(inputDescriptor);
+        input.add(inputItem);
+      } else if (inputDescriptor instanceof InputFileBaseDescriptor) {
+        LSServerInput inputItem = new LSServerInputFile(inputDescriptor);
+        input.add(inputItem);
+      }
+    }
+    
+    filter = new ArrayList<>();
+    for (FilterDescriptor filterDescriptor : inputConfig.getFilter()) {
+      if (filterDescriptor instanceof FilterGrokDescriptor) {
+        LSServerFilter filterItem = new LSServerFilterGrok(filterDescriptor);
+        filter.add(filterItem);
+      } else if (filterDescriptor instanceof FilterKeyValueDescriptor) {
+        LSServerFilter filterItem = new LSServerFilterKeyValue(filterDescriptor);
+        filter.add(filterItem);
+      } else if (filterDescriptor instanceof FilterJsonDescriptor) {
+        LSServerFilter filterItem = new LSServerFilterJson(filterDescriptor);
+        filter.add(filterItem);
+      }
+    }
+  }
+
+  public List<LSServerInput> getInput() {
+    return input;
+  }
+
+  public void setInput(List<LSServerInput> input) {
+    this.input = input;
+  }
+
+  public List<LSServerFilter> getFilter() {
+    return filter;
+  }
+
+  public void setFilter(List<LSServerFilter> filter) {
+    this.filter = filter;
+  }
+}