You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@ambari.apache.org by al...@apache.org on 2016/06/29 14:37:00 UTC

[01/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Repository: ambari
Updated Branches:
  refs/heads/branch-2.4 f403a36ff -> 283256c83


http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs
deleted file mode 100644
index 60fccf3..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page.hbs
+++ /dev/null
@@ -1,42 +0,0 @@
-{{!
-   Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-}}
-<nav class="navbar navbar-default">
-  <div class="container-fluid">
-    <ul class="nav navbar-nav">
-      <li class="dropdown nav-tab"><a class="dropdown-toggle" data-toggle="dropdown" href="#">Hive <span class="caret"></span></a>
-        <ul class="dropdown-menu">
-          <li>{{#link-to 'homePage.hive-history'}}HiveHistory Query{{/link-to}}</li>
-          <li>{{#link-to 'homePage.hive-saved-query'}}HiveSaved Query{{/link-to}}</li>
-        </ul>
-      </li>
-      <li class="dropdown nav-tab"><a class="dropdown-toggle" data-toggle="dropdown" href="#">Pig <span class="caret"></span></a>
-        <ul class="dropdown-menu">
-          <li>{{#link-to 'homePage.pig-script'}}PigSaved script{{/link-to}}</li>
-          <li>{{#link-to 'homePage.pig-job'}}PigJob{{/link-to}}</li>
-        </ul>
-      </li>
-      <li>{{#link-to 'homePage.revert-change'}}RevertChange{{/link-to}}</li>
-    </ul>
-  </div>
-</nav>
-
-<div class="container">
-  {{outlet}}
-
-</div>
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs
deleted file mode 100644
index b43e49f..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-history.hbs
+++ /dev/null
@@ -1,125 +0,0 @@
-{{!
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-}}
-<div class="panel panel-default">
-  <div class="panel-heading">
-    <h3>History Query Migration
-    </h3>
-  </div>
-  <div class="panel-body">
-    <div class="row">
-      <div class="col-sm-3">
-        User Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.usersdetail  optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Instance Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.hiveinstancedetail  optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename  placeholder="Select an Instance name" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Start Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=startdate valueFormat='YYYY-MM-DD' name="startdate" id="startdate" value=startdate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        End Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=enddate valueFormat='YYYY-MM-DD' name="enddate" id="enddate" value=enddate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        <button class="btn btn-success" {{action 'submitResult'}} disabled={{jobstatus}}>Submit</button>
-      </div>
-      <div class="col-sm-3">
-        {{#if jobstatus}}
-          <h5>
-            <font color="green">Job has been Submitted.
-            </font>
-          </h5>
-        {{/if}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if jobstatus}}
-          <br>
-          <div class="progress" id="progressbar" style="">
-            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progressBar}}%">
-            </div>
-          </div>
-        {{/if}}
-      </div>
-    </div>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if completionStatus}}
-          <h2>Migration Report
-          </h2>
-          <table class="table table-hover">
-            <thead>
-            <tr>
-              <th>Parameters</th>
-              <th>Status</th>
-            </tr>
-            </thead>
-            <tbody>
-            <tr>
-              <td>Number of Query Transferred</td>
-              <td>{{numberOfQueryTransfered}}</td>
-            </tr>
-            <tr>
-              <td>Total Number of Queries</td>
-              <td>{{totalNoQuery}}</td>
-            </tr>
-            <tr>
-              <td>Total Time Taken</td><td>{{totalTimeTaken}}ms</td>
-            </tr>
-            <tr>
-              <td>Hue User Name(Source)</td>
-              <td>{{Username}}</td>
-            </tr>
-            <tr>
-              <td>Ambari Instance Name(Target)</td>
-              <td>{{instanceName}}</td>
-            </tr>
-            </tbody>
-          </table>
-        {{/if}}
-      </div>
-    </div>
-  </div>
-</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs
deleted file mode 100644
index c755446..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/hive-saved-query.hbs
+++ /dev/null
@@ -1,126 +0,0 @@
-{{!
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-}}
-<div class="panel panel-default">
-  <div class="panel-heading">
-    <h3>Saved Query Migration
-    </h3>
-  </div>
-  <div class="panel-body">
-    <div class="row">
-      <div class="col-sm-3">
-        User Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.usersdetail  optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Instance Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.hiveinstancedetail  optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename  placeholder="Select an Instance name" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Start Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=startdate valueFormat='YYYY-MM-DD' name="startdate" id="startdate" value=startdate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        End Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=enddate valueFormat='YYYY-MM-DD' name="enddate" id="enddate" value=enddate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        <button class="btn btn-success" {{action 'submitResult'}} disabled={{jobstatus}}>Submit</button>
-      </div>
-      <div class="col-sm-3">
-        {{#if jobstatus}}
-          <h5>
-            <font color="green">Job has been Submitted.
-            </font>
-          </h5>
-        {{/if}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if jobstatus}}
-          <br>
-          <div class="progress" id="progressbar" style="">
-            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progressBar}}%">
-            </div>
-          </div>
-        {{/if}}
-      </div>
-    </div>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if completionStatus}}
-          <h2>Migration Report
-          </h2>
-          <table class="table table-hover">
-            <thead>
-            <tr>
-              <th>Parameters</th>
-              <th>Status</th>
-            </tr>
-            </thead>
-            <tbody>
-            <tr>
-              <td>Number of Query Transferred</td>
-              <td>{{numberOfQueryTransfered}}</td>
-            </tr>
-            <tr>
-              <td>Total Number of Queries</td>
-              <td>{{totalNoQuery}}</td>
-            </tr>
-            <tr>
-              <td>Total Time Taken</td>
-              <td>{{totalTimeTaken}}ms</td>
-            </tr>
-            <tr>
-              <td>Hue User Name(Source)</td>
-              <td>{{Username}}</td>
-            </tr>
-            <tr>
-              <td>Ambari Instance Name(Target)</td>
-              <td>{{instanceName}}</td>
-            </tr>
-            </tbody>
-          </table>
-        {{/if}}
-      </div>
-    </div>
-  </div>
-</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs
deleted file mode 100644
index 4562ce3..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-job.hbs
+++ /dev/null
@@ -1,127 +0,0 @@
-{{!
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-}}
-<div class="panel panel-default">
-  <div class="panel-heading">
-    <h3>Pig Job Migration
-    </h3>
-  </div>
-  <div class="panel-body">
-    <div class="row">
-      <div class="col-sm-3">
-        User Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.usersdetail  optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Instance Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.piginstancedetail  optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename  placeholder="Select an Instance name" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Start Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=startdate valueFormat='YYYY-MM-DD' name="startdate" id="startdate" value=startdate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        End Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=enddate valueFormat='YYYY-MM-DD' name="enddate" id="enddate" value=enddate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        <button class="btn btn-success" {{action 'submitResult'}} disabled={{jobstatus}}>Submit
-        </button>
-      </div>
-      <div class="col-sm-3">
-        {{#if jobstatus}}
-          <h5>
-            <font color="green">Job has been Submitted.
-            </font>
-          </h5>
-        {{/if}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if jobstatus}}
-          <br>
-          <div class="progress" id="progressbar" style="">
-            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progressBar}}%">
-            </div>
-          </div>
-        {{/if}}
-      </div>
-    </div>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if completionStatus}}
-          <h2>Migration Report
-          </h2>
-          <table class="table table-hover">
-            <thead>
-            <tr>
-              <th>Parameters</th>
-              <th>Status</th>
-            </tr>
-            </thead>
-            <tbody>
-            <tr>
-              <td>Number of Query Transferred</td>
-              <td>{{numberOfQueryTransfered}}</td>
-            </tr>
-            <tr>
-              <td>Total Number of Queries</td>
-              <td>{{totalNoQuery}}</td>
-            </tr>
-            <tr>
-              <td>Total Time Taken</td>
-              <td>{{totalTimeTaken}}ms</td>
-            </tr>
-            <tr>
-              <td>Hue User Name(Source)</td>
-              <td>{{Username}}</td>
-            </tr>
-            <tr>
-              <td>Ambari Instance Name(Target)</td>
-              <td>{{instanceName}}</td>
-            </tr>
-            </tbody>
-          </table>
-        {{/if}}
-      </div>
-    </div>
-  </div>
-</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs
deleted file mode 100644
index 425b6ea..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/pig-script.hbs
+++ /dev/null
@@ -1,127 +0,0 @@
-{{!
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-}}
-<div class="panel panel-default">
-  <div class="panel-heading">
-    <h3>Saved Script Migration
-    </h3>
-  </div>
-  <div class="panel-body">
-    <div class="row">
-      <div class="col-sm-3">
-        User Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.usersdetail  optionValuePath="content.username" optionLabelPath="content.username" value=usernamehue placeholder="Select an userName" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Instance Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.piginstancedetail  optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename  placeholder="Select an Instance name" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Start Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=startdate valueFormat='YYYY-MM-DD' name="startdate" id="startdate" value=startdate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        End Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=enddate valueFormat='YYYY-MM-DD' name="enddate" id="enddate" value=enddate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        <button class="btn btn-success" {{action 'submitResult'}} disabled={{jobstatus}}>Submit
-        </button>
-      </div>
-      <div class="col-sm-3">
-        {{#if jobstatus}}
-          <h5>
-            <font color="green">Job has been Submitted.
-            </font>
-          </h5>
-        {{/if}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if jobstatus}}
-          <br>
-          <div class="progress" id="progressbar" style="">
-            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progressBar}}%">
-            </div>
-          </div>
-        {{/if}}
-      </div>
-    </div>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if completionStatus}}
-          <h2>Migration Report
-          </h2>
-          <table class="table table-hover">
-            <thead>
-            <tr>
-              <th>Parameters</th>
-              <th>Status</th>
-            </tr>
-            </thead>
-            <tbody>
-            <tr>
-              <td>Number of Query Transferred</td>
-              <td>{{numberOfQueryTransfered}}</td>
-            </tr>
-            <tr>
-              <td>Total Number of Queries</td>
-              <td>{{totalNoQuery}}</td>
-            </tr>
-            <tr>
-              <td>Total Time Taken</td>
-              <td>{{totalTimeTaken}}ms</td>
-            </tr>
-            <tr>
-              <td>Hue User Name(Source)</td>
-              <td>{{Username}}</td>
-            </tr>
-            <tr>
-              <td>Ambari Instance Name(Target)</td>
-              <td>{{instanceName}}</td>
-            </tr>
-            </tbody>
-          </table>
-        {{/if}}
-      </div>
-    </div>
-  </div>
-</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/revert-change.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/revert-change.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/revert-change.hbs
deleted file mode 100644
index a3ea43d..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/home-page/revert-change.hbs
+++ /dev/null
@@ -1,99 +0,0 @@
-{{!
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-}}
-<div class="panel panel-default">
-  <div class="panel-heading">
-    <h3>Revert Change
-    </h3>
-  </div>
-  <div class="panel-body">
-    <div class="row">
-      <div class="col-sm-3">
-        Instance Name
-        <font size="3" color="red"> *
-        </font>
-      </div>
-      <div class="col-sm-3">
-        {{ember-selectize content=model.allinstancedetail  optionValuePath="content.instanceName" optionLabelPath="content.instanceName" value=instancename  placeholder="Select an Instance name" }}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        Revert Date
-      </div>
-      <div class="col-sm-9">
-        {{date-picker size="35" date=startdate valueFormat='YYYY-MM-DD HH:MM:SS' name="revertdate" id="revertdate" value=revertdate}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-3">
-        <button class="btn btn-success" {{action 'submitResult'}} disabled={{jobstatus}}>Submit
-        </button>
-      </div>
-      <div class="col-sm-3">
-        {{#if jobstatus}}
-          <h5>
-            <font color="green">Job has been Submitted.
-            </font>
-          </h5>
-        {{/if}}
-      </div>
-    </div>
-    <br>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if jobstatus}}
-          <br>
-          <div class="progress" id="progressbar" style="">
-            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progressBar}}%">
-            </div>
-          </div>
-        {{/if}}
-      </div>
-    </div>
-    <div class="row">
-      <div class="col-sm-9">
-        {{#if completionStatus}}
-          <h2>RevertChange Report
-          </h2>
-          <table class="table table-hover">
-            <thead>
-            <tr>
-              <th>Parameters</th>
-              <th>Status</th>
-            </tr>
-            </thead>
-            <tbody>
-            <tr>
-              <td>Total Number of Query Reverted</td>
-              <td>{{numberOfQueryTransfered}}</td>
-            </tr>
-            <tr>
-              <td>Total Time Taken</td>
-              <td>{{totalTimeTaken}}ms</td>
-            </tr>
-            <tr>
-              <td>Instance Name</td>
-              <td>{{instanceName}}</td>
-            </tr>
-            </tbody>
-          </table>
-        {{/if}}
-      </div>
-    </div>
-  </div>
-</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/bower.json b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/bower.json
deleted file mode 100644
index f80b9ad..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/bower.json
+++ /dev/null
@@ -1,14 +0,0 @@
-{
-  "name": "hueambarimigration-view",
-  "dependencies": {
-    "ember": "~2.4.3",
-    "ember-cli-shims": "0.1.1",
-    "ember-cli-test-loader": "0.2.2",
-    "ember-qunit-notifications": "0.1.0",
-    "bootstrap": "^3.3.6",
-    "pikaday": "^1.4.0",
-    "moment": "^2.13.0",
-    "selectize": "miguelcobain/selectize.js#master",
-    "pace": "^1.0.2"
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/config/environment.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/config/environment.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/config/environment.js
deleted file mode 100644
index 07b1e26..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/config/environment.js
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-module.exports = function(environment) {
-  var ENV = {
-    modulePrefix: 'hueambarimigration-view',
-    environment: environment,
-    baseURL: '/',
-    locationType: 'hash',
-    EmberENV: {
-      FEATURES: {
-        // Here you can enable experimental features on an ember canary build
-        // e.g. 'with-controller': true
-      }
-    },
-
-    APP: {
-      // Here you can pass flags/options to your application instance
-      // when it is created
-    }
-  };
-
-  if (environment === 'development') {
-    // ENV.APP.LOG_RESOLVER = true;
-     ENV.APP.LOG_ACTIVE_GENERATION = true;
-    // ENV.APP.LOG_TRANSITIONS = true;
-    // ENV.APP.LOG_TRANSITIONS_INTERNAL = true;
-     ENV.APP.LOG_VIEW_LOOKUPS = true;
-  }
-
-  if (environment === 'test') {
-    // Testem prefers this...
-    ENV.baseURL = '/';
-    ENV.locationType = 'auto';
-
-    // keep test console output quieter
-    ENV.APP.LOG_ACTIVE_GENERATION = false;
-    ENV.APP.LOG_VIEW_LOOKUPS = false;
-
-    ENV.APP.rootElement = '#ember-testing';
-  }
-
-  if (environment === 'production') {
-
-  }
-
-  return ENV;
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/ember-cli-build.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/ember-cli-build.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/ember-cli-build.js
deleted file mode 100644
index 002bf1c..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/ember-cli-build.js
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*jshint node:true*/
-/* global require, module */
-var EmberApp = require('ember-cli/lib/broccoli/ember-app');
-
-module.exports = function(defaults) {
-  var app = new EmberApp(defaults, {
-
-    // Add options here
-  });
-
-  // Use `app.import` to add additional libraries to the generated
-  // output files.
-  //
-  // If you need to use different assets in different
-  // environments, specify an object as the first parameter. That
-  // object's keys should be the environment name and the values
-  // should be the asset to use in that environment.
-  //
-  // If the library that you are including contains AMD or ES6
-  // modules that you would like to import into your application
-  // please specify an object with the list of modules as keys
-  // along with the exports of each module as its value.
-
-  app.import('bower_components/bootstrap/dist/css/bootstrap.css');
-  app.import('bower_components/bootstrap/dist/css/bootstrap.css.map',{
-  destDir: 'assets'
-  });
-  app.import('bower_components/bootstrap/dist/js/bootstrap.js');
-  app.import('bower_components/bootstrap/fonts/glyphicons-halflings-regular.woff', {
-    destDir: 'fonts'
-  });
-  app.import('bower_components/bootstrap/fonts/glyphicons-halflings-regular.woff2', {
-      destDir: 'fonts'
-    });
-  app.import('bower_components/bootstrap/fonts/glyphicons-halflings-regular.ttf', {
-      destDir: 'fonts'
-    });
-
-
-
-
-  return app.toTree();
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/package.json b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/package.json
deleted file mode 100644
index b9f832e..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/package.json
+++ /dev/null
@@ -1,51 +0,0 @@
-{
-  "name": "hueambarimigration-view",
-  "version": "0.0.0",
-  "description": "Small description for hueambarimigration-view goes here",
-  "private": true,
-  "directories": {
-    "doc": "doc",
-    "test": "tests"
-  },
-  "scripts": {
-    "start": "ember server",
-    "build": "ember build",
-    "test": "ember test",
-    "preinstall": "chmod +x node/npm/bin/node-gyp-bin/node-gyp",
-    "postinstall": "bash node/with_new_path.sh node node_modules/.bin/bower --allow-root install"
-
-  },
-  "repository": "",
-  "engines": {
-    "node": ">= 0.10.0"
-  },
-  "author": "",
-  "license": "MIT",
-  "devDependencies": {
-    "bower": "1.7.2",
-    "broccoli-asset-rev": "^2.4.2",
-    "ember-ajax": "0.7.1",
-    "ember-cli": "2.4.3",
-    "ember-cli-app-version": "^1.0.0",
-    "ember-cli-auto-complete": "^0.2.1",
-    "ember-cli-babel": "^5.1.6",
-    "ember-cli-datepicker": "2.0.1",
-    "ember-cli-dependency-checker": "^1.2.0",
-    "ember-cli-htmlbars": "^1.0.3",
-    "ember-cli-htmlbars-inline-precompile": "^0.3.1",
-    "ember-cli-inject-live-reload": "^1.4.0",
-    "ember-cli-pace": "0.1.0",
-    "ember-cli-qunit": "^1.4.0",
-    "ember-cli-release": "0.2.8",
-    "ember-cli-selectize": "0.5.3",
-    "ember-cli-sri": "^2.1.0",
-    "ember-cli-uglify": "^1.2.0",
-    "ember-data": "^2.4.2",
-    "ember-export-application-global": "^1.0.5",
-    "ember-load-initializers": "^0.5.1",
-    "ember-power-select": "0.10.4",
-    "ember-resolver": "^2.0.3",
-    "emberx-select": "2.1.2",
-    "loader.js": "^4.0.1"
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/crossdomain.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/crossdomain.xml b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/crossdomain.xml
deleted file mode 100644
index 0c16a7a..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/crossdomain.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0"?>
-<!DOCTYPE cross-domain-policy SYSTEM "http://www.adobe.com/xml/dtds/cross-domain-policy.dtd">
-<cross-domain-policy>
-  <!-- Read this: www.adobe.com/devnet/articles/crossdomain_policy_file_spec.html -->
-
-  <!-- Most restrictive policy: -->
-  <site-control permitted-cross-domain-policies="none"/>
-
-  <!-- Least restrictive policy: -->
-  <!--
-  <site-control permitted-cross-domain-policies="all"/>
-  <allow-access-from domain="*" to-ports="*" secure="false"/>
-  <allow-http-request-headers-from domain="*" headers="*" secure="false"/>
-  -->
-</cross-domain-policy>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/robots.txt
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/robots.txt b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/robots.txt
deleted file mode 100644
index f591645..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/public/robots.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-# http://www.robotstxt.org
-User-agent: *
-Disallow:

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/testem.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/testem.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/testem.js
deleted file mode 100644
index 30c6f0e..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/testem.js
+++ /dev/null
@@ -1,29 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-module.exports = {
-  "framework": "qunit",
-  "test_page": "tests/index.html?hidepassed",
-  "disable_watching": true,
-  "launch_in_ci": [
-    "PhantomJS"
-  ],
-  "launch_in_dev": [
-    "PhantomJS",
-    "Chrome"
-  ]
-};

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/.jshintrc
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/.jshintrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/.jshintrc
deleted file mode 100644
index 113e158..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/.jshintrc
+++ /dev/null
@@ -1,53 +0,0 @@
-{
-  "predef": [
-    "document",
-    "window",
-    "location",
-    "setTimeout",
-    "$",
-    "-Promise",
-    "define",
-    "console",
-    "visit",
-    "exists",
-    "fillIn",
-    "click",
-    "select",
-    "keyEvent",
-    "triggerEvent",
-    "find",
-    "findWithAssert",
-    "wait",
-    "DS",
-    "andThen",
-    "currentURL",
-    "currentPath",
-    "currentRouteName"
-  ],
-  "node": false,
-  "browser": false,
-  "boss": true,
-  "curly": true,
-  "debug": false,
-  "devel": false,
-  "eqeqeq": true,
-  "evil": true,
-  "forin": false,
-  "immed": false,
-  "laxbreak": false,
-  "newcap": true,
-  "noarg": true,
-  "noempty": false,
-  "nonew": false,
-  "nomen": false,
-  "onevar": false,
-  "plusplus": false,
-  "regexp": false,
-  "undef": true,
-  "sub": true,
-  "strict": false,
-  "white": false,
-  "eqnull": true,
-  "esnext": true,
-  "unused": true
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/destroy-app.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/destroy-app.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/destroy-app.js
deleted file mode 100644
index a0fb910..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/destroy-app.js
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-export default function destroyApp(application) {
-  Ember.run(application, 'destroy');
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/module-for-acceptance.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/module-for-acceptance.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/module-for-acceptance.js
deleted file mode 100644
index 541b4ed..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/module-for-acceptance.js
+++ /dev/null
@@ -1,40 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import { module } from 'qunit';
-import startApp from '../helpers/start-app';
-import destroyApp from '../helpers/destroy-app';
-
-export default function(name, options = {}) {
-  module(name, {
-    beforeEach() {
-      this.application = startApp();
-
-      if (options.beforeEach) {
-        options.beforeEach.apply(this, arguments);
-      }
-    },
-
-    afterEach() {
-      if (options.afterEach) {
-        options.afterEach.apply(this, arguments);
-      }
-
-      destroyApp(this.application);
-    }
-  });
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/resolver.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/resolver.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/resolver.js
deleted file mode 100644
index e64a9de..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/resolver.js
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Resolver from '../../resolver';
-import config from '../../config/environment';
-
-const resolver = Resolver.create();
-
-resolver.namespace = {
-  modulePrefix: config.modulePrefix,
-  podModulePrefix: config.podModulePrefix
-};
-
-export default resolver;

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/start-app.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/start-app.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/start-app.js
deleted file mode 100644
index 01841f7..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/helpers/start-app.js
+++ /dev/null
@@ -1,35 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-import Application from '../../app';
-import config from '../../config/environment';
-
-export default function startApp(attrs) {
-  let application;
-
-  let attributes = Ember.merge({}, config.APP);
-  attributes = Ember.merge(attributes, attrs); // use defaults, but you can override;
-
-  Ember.run(() => {
-    application = Application.create(attributes);
-    application.setupForTesting();
-    application.injectTestHelpers();
-  });
-
-  return application;
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/index.html
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/index.html b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/index.html
deleted file mode 100644
index ba794e5..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/index.html
+++ /dev/null
@@ -1,48 +0,0 @@
-<!---
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-<!DOCTYPE html>
-<html>
-  <head>
-    <meta charset="utf-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <title>HueambarimigrationView Tests</title>
-    <meta name="description" content="">
-    <meta name="viewport" content="width=device-width, initial-scale=1">
-
-    {{content-for "head"}}
-    {{content-for "test-head"}}
-
-    <link rel="stylesheet" href="assets/vendor.css">
-    <link rel="stylesheet" href="assets/hueambarimigration-view.css">
-    <link rel="stylesheet" href="assets/test-support.css">
-
-    {{content-for "head-footer"}}
-    {{content-for "test-head-footer"}}
-  </head>
-  <body>
-    {{content-for "body"}}
-    {{content-for "test-body"}}
-
-    <script src="testem.js" integrity=""></script>
-    <script src="assets/vendor.js"></script>
-    <script src="assets/test-support.js"></script>
-    <script src="assets/hueambarimigration-view.js"></script>
-    <script src="assets/tests.js"></script>
-    <script src="assets/test-loader.js"></script>
-
-    {{content-for "body-footer"}}
-    {{content-for "test-body-footer"}}
-  </body>
-</html>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/test-helper.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/test-helper.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/test-helper.js
deleted file mode 100644
index 4f1456b..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/tests/test-helper.js
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import resolver from './helpers/resolver';
-import registerSelectHelper from './helpers/register-select-helper';
-registerSelectHelper();
-import {
-  setResolver
-} from 'ember-qunit';
-
-setResolver(resolver);

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/vendor/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/vendor/.gitkeep b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/vendor/.gitkeep
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/package.json
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/package.json b/contrib/views/hueambarimigration/src/main/resources/ui/package.json
new file mode 100644
index 0000000..8d16a01
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/package.json
@@ -0,0 +1,27 @@
+{
+  "name": "huetoambari",
+  "version": "0.0.0",
+  "private": true,
+  "directories": {
+    "doc": "doc",
+    "test": "tests"
+  },
+  "scripts": {
+    "start": "ember server",
+    "build": "ember build",
+    "test": "ember test",
+    "preinstall": "chmod +x node/npm/bin/node-gyp-bin/node-gyp",
+    "postinstall": "bash node/with_new_path.sh node node_modules/.bin/bower --allow-root install"
+  },
+
+  "engines": {
+    "node": ">= 0.10.32"
+  },
+  "author": "",
+  "license": "MIT",
+  "devDependencies": {
+
+    "bower": ">= 1.3.12"
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/pigjobmigration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/pigjobmigration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/pigjobmigration.jsp
new file mode 100644
index 0000000..6e8471a
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/pigjobmigration.jsp
@@ -0,0 +1,233 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<title>bootstrap datepicker examples</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+<script type="text/javascript">
+	function pigjobquery1() {
+
+		var startdate = document.getElementById('startdate4').value;
+		var enddate = document.getElementById('enddate4').value;
+
+		var uname = document.getElementById("username4");
+		uname = uname.options[uname.selectedIndex].value;
+		var instance = document.getElementById("instance4");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+			pigjob(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+		}
+
+	}
+
+	function loadpercentage() {
+     	$.ajax({
+        url : "ProgressBarStatus",
+        success : function(result) {
+         $('#progressbarhivesavedquery').css('width', result);
+          console.log("Got the precentage completion "+ result);
+   			},
+
+       });
+  }
+
+	function pigjob(uname, startdate, enddate, instance) {
+
+		var url = "Pigjobsevlet?username=" + uname + "&startdate="
+				+ startdate + "&enddate=" + enddate + "&instance=" + instance;
+
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				clearInterval(interval);
+				$('#progressbar').hide()
+                $('#lines').show()
+			}
+		});
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+
+</head>
+<%
+	ArrayList<String> username = new ArrayList<String>();
+	ArrayList<String> instancename = new ArrayList<String>();
+	int i;
+	
+	Connection conn = null;
+
+	 ServletContext context = request.getServletContext();
+     ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+	conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+	Statement stat = conn.createStatement();
+
+	ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+	while (rs.next()) {
+		username.add(rs.getString(2));
+	}
+
+	rs.close();
+
+	Connection c = null;
+	Statement stmt = null;
+	
+
+	c =  DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+	c.setAutoCommit(false);
+	stmt = c.createStatement();
+
+	ResultSet rs1=null;
+
+	if(view.getProperties().get("ambaridrivername").contains("oracle"))
+    		{
+    		 rs1 = stmt
+            	    .executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}'");
+    		}
+    		else
+    		{
+    		 rs1 = stmt
+            			.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}';");
+    		}
+
+
+	while (rs1.next()) {
+		instancename.add(rs1.getString(1));
+
+	}
+	rs1.close();
+	stmt.close();
+	
+%>
+<div class="row">
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="pigjobquery()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Pig Job Migration</h3>
+				</div>
+				<div class="panel-body">
+					<div class="row">
+						<div class="col-sm-3">
+							UserName<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username(*)" name="username4" id="username4"> -->
+							<select class="form-control" name="username4"
+								placeholder="User name" id="username4" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance4" id="instance4"> -->
+							<select class="form-control" name="instance4"
+								placeholder="Instance name" id="instance4" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate4"
+								id="startdate4">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate4"
+								id="enddate4">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="pigjobquery1()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+
+					<br>
+           <br>
+
+           <div class="progress" id="progressbar" style="display: none;">
+           <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+           </div>
+				</div>
+			</div>
+		</form>
+	</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/pigscriptsmigration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/pigscriptsmigration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/pigscriptsmigration.jsp
new file mode 100644
index 0000000..090ebc9
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/pigscriptsmigration.jsp
@@ -0,0 +1,227 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<title>bootstrap datepicker examples</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+<script type="text/javascript">
+	function pigsavedquery() {
+		var startdate = document.getElementById('startdate3').value;
+		var enddate = document.getElementById('enddate3').value;
+
+		var uname = document.getElementById("username3");
+		uname = uname.options[uname.selectedIndex].value;
+		var instance = document.getElementById("instance3");
+		instance = instance.options[instance.selectedIndex].value;
+		//	alert("lets see");
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			console.log("Showing loading");
+			$('#progressbar').show();
+			$('#lines').hide();
+			pigsaved(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+
+		}
+
+	}
+
+	function loadpercentage() {
+  		$.ajax({
+        url : "ProgressBarStatus",
+         success : function(result) {
+           $('#progressbarhivesavedquery').css('width', result);
+           console.log("Got the precentage completion "+ result);
+         },
+
+      });
+
+    }
+
+	function pigsaved(uname, startdate, enddate, instance) {
+		//alert("savedquery");
+		var url = "PigServlet?username=" + uname + "&startdate=" + startdate
+				+ "&enddate=" + enddate + "&instance=" + instance;
+
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				clearInterval(interval);
+				$('#progressbar').hide()
+                $('#lines').show()
+			}
+		});
+
+	}
+</script>
+
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+
+</head>
+<%
+	ArrayList<String> username = new ArrayList<String>();
+	ArrayList<String> instancename = new ArrayList<String>();
+	int i;
+	
+	Connection conn = null;
+	 ServletContext context = request.getServletContext();
+     ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+	conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+	Statement stat = conn.createStatement();
+
+	ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+	while (rs.next()) {
+		username.add(rs.getString(2));
+	}
+
+	Connection c = null;
+	Statement stmt = null;
+
+	c =  DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+	stmt = c.createStatement();
+	ResultSet rs1=null;
+		if(view.getProperties().get("ambaridrivername").contains("oracle"))
+        		{
+        		 rs1 = stmt
+                			.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}'");
+        		}
+        		else
+        		{
+        		 rs1 = stmt
+                			.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}';");
+        		}
+	while (rs1.next()) {
+		instancename.add(rs1.getString(1));
+
+	}
+	rs1.close();
+	stmt.close();
+	
+%>
+<div class="row">
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="pigsavedquery()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Pig Saved Script Migration</h3>
+				</div>
+				<div class="panel-body">
+					<div class="row">
+						<div class="col-sm-3">
+							UserName<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username(*)" name="username3" id="username3"> -->
+							<select class="form-control" name="username3"
+								placeholder="User name" id="username3" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance3" id="instance3"> -->
+							<select class="form-control" name="instance3"
+								placeholder="Instance name" id="instance3" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate3"
+								id="startdate3">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate3"
+								id="enddate3">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="pigsavedquery()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+
+					 <br>
+           <br>
+            <div class="progress" id="progressbar" style="display: none;">
+            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+            </div>
+				</div>
+			</div>
+		</form>
+	</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/revertchange.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/revertchange.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/revertchange.jsp
new file mode 100644
index 0000000..40774aa
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/revertchange.jsp
@@ -0,0 +1,203 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<html>
+<head>
+<title>bootstrap datepicker examples</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+  <script type="text/javascript" src="js/jquery.min.js"></script>
+  <script type="text/javascript" src="js/moment.min.js"></script>
+  <script type="text/javascript" src="js/bootstrap.min.js"></script>
+  <script type="text/javascript" src="js/bootstrap-datetimepicker.min.js"></script>
+
+  <link rel="stylesheet" href="css/bootstrap.min.css" />
+  <link rel="stylesheet" href="css/bootstrap-datetimepicker.min.css" />
+
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<%
+				int i;
+				ArrayList<String> instancename=new ArrayList<String>();          
+                Connection c = null;
+                Statement stmt = null;
+                ServletContext context = request.getServletContext();
+                ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+                c =  DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+	
+	stmt = c.createStatement();
+	ResultSet rs1=null;
+		if(view.getProperties().get("ambaridrivername").contains("oracle"))
+        		{
+        		 rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity");
+        		}
+        		else
+        		{
+        		 rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity;");
+        		}
+
+	while (rs1.next()) {
+		instancename.add(rs1.getString(1));
+ 
+	}
+	rs1.close();
+	stmt.close();
+	c.close();
+%>
+<div class="row">
+
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Revert Change</h3>
+				</div>
+				<div class="panel-body">
+					<p></p>
+					<p></p>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-6">
+							  &nbsp; &nbsp; Instance name<font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance4" id="instance4"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for(i=0;i<instancename.size();i++)
+																	{
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<p></p>
+					<p></p>
+
+					<div class="row">
+						<div class="col-sm-6"> &nbsp; &nbsp; Enter the Time Upto which you want to
+							Revert</div>
+
+
+						<div class="container">
+                <div class="row">
+                    <div class='col-sm-3'>
+                        <div class="form-group">
+                            <div class='input-group date' id='datetimepicker1'>
+                                <input type='text' class="form-control"  id="startdate" name="startdate" />
+                                <span class="input-group-addon">
+                                    <span class="glyphicon glyphicon-calendar"></span>
+                                </span>
+                            </div>
+                        </div>
+                    </div>
+                    <script type="text/javascript">
+                        $(function () {
+                            $('#datetimepicker1').datetimepicker(
+                            {format : "YYYY-MM-DD HH:MM:SS"}
+                            );
+                        });
+                    </script>
+                </div>
+            </div>
+
+
+
+					</div>
+					<p></p>
+					<p></p>
+					<p></p>
+					<p></p>
+
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							&nbsp; &nbsp;<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="submittime()">
+						</div>
+					</div>
+					<div id="lines" style="display: none;">
+
+					 <div class="progress" id="progressbar" >
+                                            <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+
+                                            </div>
+
+				</div>
+			</div>
+		</form>
+
+	</div>
+</div>
+
+<script type="text/javascript">
+	function submittime() {
+	var strDatetime = $("#startdate").val();
+	var instance = document.getElementById("instance");
+   instance= instance.options[instance.selectedIndex].value;
+		
+		$('#progressbar').show();
+	    $('#lines').hide();
+		revertingchange(strDatetime,instance);
+		interval = setInterval(loadpercentage, 1000 );
+
+	}
+
+	function revertingchange(revertdate,instance) {
+		//alert("hello");
+		
+		var url = "RevertChange?revertdate="+revertdate+"&instance="+instance;
+		
+		$.ajax({url: url, success: function(result){
+			console.log("Got Result");
+			document.getElementById("lines").innerHTML = result;
+			$('#progressbar').hide()
+			$('#lines').show()
+			clearInterval(interval);
+   		 }});
+	}
+	function loadpercentage() {
+    	$.ajax({
+        url : "ProgressBarStatus",
+        success : function(result) {
+        $('#progressbarhivesavedquery').css('width', result);
+        console.log("Got the precentage completion "+ result);
+        },
+      });
+  }
+</script>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/view.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/view.xml b/contrib/views/hueambarimigration/src/main/resources/view.xml
index 3151d67..d43ab7f 100644
--- a/contrib/views/hueambarimigration/src/main/resources/view.xml
+++ b/contrib/views/hueambarimigration/src/main/resources/view.xml
@@ -126,102 +126,4 @@ limitations under the License. Kerberos, LDAP, Custom. Binary/Htt
     <placeholder>y/n</placeholder>
   </parameter>
 
-
-  <resource>
-    <name>ambaridatabases</name>
-    <plural-name>ambaridatabases</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.AmbariDatabaseCheck</service-class>
-  </resource>
-
-  <resource>
-    <name>huedatabases</name>
-    <plural-name>huedatabases</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.HueDatabaseCheck</service-class>
-  </resource>
-
-  <resource>
-    <name>huehttpurls</name>
-    <plural-name>huehttpurls</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.HueHttpUrlCheck</service-class>
-  </resource>
-
-  <resource>
-    <name>huewebhdfsurls</name>
-    <plural-name>huewebhdfsurls</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.HueWebHdfsCheck</service-class>
-  </resource>
-
-  <resource>
-    <name>ambariwebhdfsurls</name>
-    <plural-name>ambariwebhdfsurls</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.AmbariWebHdfsCheck</service-class>
-  </resource>
-
-
-
-
-  <resource>
-    <name>checkprogresses</name>
-    <plural-name>checkprogresses</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.CheckProgresStatus</service-class>
-  </resource>
-
-
-  <resource>
-    <name>startmigrations</name>
-    <plural-name>startmigrations</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.InitiateJobMigration</service-class>
-  </resource>
-
-  <resource>
-    <name>returnjobids</name>
-    <plural-name>returnjobids</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.CreateJobId</service-class>
-  </resource>
-
-  <resource>
-    <name>returnjobidforrevertchanges</name>
-    <plural-name>returnjobidforrevertchanges</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.CreateJobIdRevertChange</service-class>
-  </resource>
-
-  <resource>
-    <name>startrevertchanges</name>
-    <plural-name>startrevertchanges</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.InitiateJobMigrationforRevertchange</service-class>
-  </resource>
-
-  <resource>
-    <name>usersdetails</name>
-    <plural-name>usersdetails</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.UserDetailHue</service-class>
-  </resource>
-
-  <resource>
-    <name>hiveinstancedetails</name>
-    <plural-name>hiveinstancedetails</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.HiveInstanceDetailsAmbari</service-class>
-  </resource>
-
-  <resource>
-    <name>allinstancedetails</name>
-    <plural-name>allinstancedetails</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.AllInstanceDetailsAmbari</service-class>
-  </resource>
-
-  <resource>
-    <name>piginstancedetails</name>
-    <plural-name>piginstancedetails</plural-name>
-    <service-class>org.apache.ambari.view.huetoambarimigration.migration.configuration.PigInstanceDetailsAmbari</service-class>
-  </resource>
-
-  <persistence>
-    <entity>
-      <class>org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse</class>
-      <id-property>id</id-property>
-    </entity>
-    </persistence>
-
-
-
 </view>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/pom.xml b/contrib/views/pom.xml
index 8cdf791..0114013 100644
--- a/contrib/views/pom.xml
+++ b/contrib/views/pom.xml
@@ -77,9 +77,6 @@
             <exclude>.settings</exclude>
             <exclude>**/target/**</exclude>
             <exclude>**/.gitkeep</exclude>
-            <exclude>**/.ember-cli</exclude>
-            <exclude>**/.travis.yml</exclude>
-            <exclude>**/.watchmanconfig</exclude>
             <exclude>**/.bowerrc</exclude>
             <exclude>**/.editorconfig</exclude>
             <exclude>**/.jshintrc</exclude>


[07/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java
deleted file mode 100644
index bcbe4de..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsUtility.java
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.*;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail.*;
-
-
-public class HiveInstanceDetailsUtility {
-
-  public List<InstanceModel> getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
-
-    List<InstanceModel> instancelist = new ArrayList<>();
-    Connection conn = null;
-    conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-    conn.setAutoCommit(false);
-    PreparedStatement prSt;
-
-    QuerySetAmbariDB ambaridatabase = null;
-
-    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-      ambaridatabase = new MysqlQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-      ambaridatabase = new PostgressQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-      ambaridatabase = new OracleQuerySetAmbariDB();
-    }
-
-    ResultSet rs1 = null;
-    prSt = ambaridatabase.getHiveInstanceDeatil(conn);
-    rs1 = prSt.executeQuery();
-    int i = 0;
-
-    while (rs1.next()) {
-      InstanceModel I = new InstanceModel();
-      I.setInstanceName(rs1.getString(1));
-      I.setId(i);
-      instancelist.add(I);
-      i++;
-    }
-    return instancelist;
-
-  }
-
-  public List<InstanceModel> getAllInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
-
-    List<InstanceModel> instancelist = new ArrayList<>();
-    Connection conn = null;
-    Statement stmt = null;
-    conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-    conn.setAutoCommit(false);
-    PreparedStatement prSt;
-
-    QuerySetAmbariDB ambaridatabase = null;
-
-    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-      ambaridatabase = new MysqlQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-      ambaridatabase = new PostgressQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-      ambaridatabase = new OracleQuerySetAmbariDB();
-    }
-
-    ResultSet rs1 = null;
-    int i = 0;
-    prSt = ambaridatabase.getAllInstanceDeatil(conn);
-    rs1 = prSt.executeQuery();
-
-    while (rs1.next()) {
-      InstanceModel I = new InstanceModel();
-      I.setInstanceName(rs1.getString(1));
-      I.setId(i);
-      instancelist.add(I);
-      i++;
-    }
-    rs1.close();
-    return instancelist;
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java
deleted file mode 100644
index a5ec758..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueDatabaseCheck.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-
-/**
- * Service class to check hue database check
- */
-@Path("/huedatabases")
-
-public class HueDatabaseCheck {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response hueDatabase() throws IOException {
-    JSONObject response = new JSONObject();
-    try {
-      response.put("huedatabase", ConfigurationCheckImplementation.checkHueDatabaseConnection(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")));
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java
deleted file mode 100644
index dd5f409..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueHttpUrlCheck.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-
-/**
- * Service class to check hue http url
- */
-@Path("/huehttpurls")
-
-public class HueHttpUrlCheck {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response hueHtttpUrl()  {
-
-    JSONObject response = new JSONObject();
-    try {
-      response.put("huehttpurl", ConfigurationCheckImplementation.checkConfigurationForHue(view.getProperties().get("Hue_URL")));
-      return Response.ok(response).build();
-
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java
deleted file mode 100644
index 7b5e7a0..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HueWebHdfsCheck.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.net.URISyntaxException;
-
-/**
- * Service class to check hueWebHdfs
- */
-
-@Path("/huewebhdfsurls")
-
-public class HueWebHdfsCheck {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response hueWebHdfs() throws IOException, URISyntaxException {
-
-    JSONObject response = new JSONObject();
-    try {
-      response.put("huewebhdfsurl", ConfigurationCheckImplementation.checkNamenodeURIConnectionforHue(view.getProperties().get("namenode_URI_Hue")));
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java
deleted file mode 100644
index cc14385..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsAmbari.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.SQLException;
-
-/**
- * Service class to fetch Piginstance detail
- */
-
-@Path("/piginstancedetails")
-
-public class PigInstanceDetailsAmbari {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response instancelist() throws IOException, PropertyVetoException, SQLException {
-
-
-    PigInstanceDetailsUtility instance=new PigInstanceDetailsUtility();
-
-    JSONObject response = new JSONObject();
-    response.put("piginstancedetails",instance.getInstancedetails(view));
-    return Response.ok(response).build();
-
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java
deleted file mode 100644
index d3cfb97..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/PigInstanceDetailsUtility.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.InstanceModel;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.*;
-import java.util.ArrayList;
-import java.util.List;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail.*;
-
-/**
- * Utility class to fetch Pig Instance details
- */
-
-public class PigInstanceDetailsUtility {
-
-  public List<InstanceModel> getInstancedetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
-
-    List<InstanceModel> instancelist = new ArrayList<>();
-    Connection conn = null;
-    Statement stmt = null;
-    PreparedStatement prSt;
-    conn = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-    conn.setAutoCommit(false);
-    stmt = conn.createStatement();
-    int i = 0;
-
-    QuerySetAmbariDB ambaridatabase = null;
-
-    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-      ambaridatabase = new MysqlQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-      ambaridatabase = new PostgressQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-      ambaridatabase = new OracleQuerySetAmbariDB();
-    }
-
-    ResultSet rs1 = null;
-
-    prSt = ambaridatabase.getAllPigInstance(conn);
-
-    rs1 = prSt.executeQuery();
-
-    while (rs1.next()) {
-      InstanceModel I = new InstanceModel();
-      I.setInstanceName(rs1.getString(1));
-      I.setId(i);
-      instancelist.add(I);
-      i++;
-    }
-    rs1.close();
-    stmt.close();
-    return instancelist;
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java
deleted file mode 100644
index d993f3a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailHue.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.SQLException;
-
-/**
- * Service class to fetch user detail
- */
-
-@Path("/usersdetails")
-
-public class UserDetailHue {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response userList() throws IOException, PropertyVetoException, SQLException {
-
-    UserDetailsUtility user=new UserDetailsUtility();
-
-    JSONObject response = new JSONObject();
-    response.put("usersdetails",user.getUserDetails(view));
-    return Response.ok(response).build();
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java
deleted file mode 100644
index 09f175b..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/UserDetailsUtility.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.UserModel;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.*;
-import java.util.ArrayList;
-import java.util.List;
-import  org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails.*;
-
-
-public class UserDetailsUtility {
-
-
-  public List<UserModel> getUserDetails(ViewContext view) throws PropertyVetoException, SQLException, IOException {
-
-    List<UserModel> userlist=new ArrayList<>();
-    Connection conn = null;
-    Statement stmt = null;
-    conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
-    conn.setAutoCommit(false);
-    stmt = conn.createStatement();
-    UserModel all=new UserModel();
-    all.setId(-1);
-    all.setUsername("all");
-    PreparedStatement prSt;
-    userlist.add(all);
-    ResultSet rs1=null;
-
-    QuerySet huedatabase = null;
-
-    if (view.getProperties().get("huedrivername").contains("mysql")) {
-      huedatabase = new MysqlQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
-      huedatabase = new PostgressQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
-      huedatabase = new SqliteQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
-      huedatabase = new OracleQuerySet();
-    }
-
-    prSt = huedatabase.getUserDetails(conn);
-
-    rs1 = prSt.executeQuery();
-
-    while (rs1.next()) {
-      UserModel I=new UserModel();
-      I.setUsername(rs1.getString(2));
-      I.setId(rs1.getInt(1));
-      System.out.println(rs1.getString(2));
-      userlist.add(I);
-    }
-    rs1.close();
-    stmt.close();
-    return userlist;
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
deleted file mode 100755
index bdcf293..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryMigrationUtility.java
+++ /dev/null
@@ -1,255 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.migration.InitiateJobMigration;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.MysqlQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.OracleQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.PostgressQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.log4j.Logger;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-
-public class HiveHistoryMigrationUtility {
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-
-  public void hiveHistoryQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-    InitiateJobMigration migrationservice = new InitiateJobMigration();
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(HiveHistoryMigrationUtility.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    logger.info("--------------------------------------");
-    logger.info("hive History query Migration started");
-    logger.info("--------------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + username);
-    logger.info("hue username is : " + instance);
-
-    MigrationModel model = new MigrationModel();
-
-    int maxCountOfAmbariDb, i = 0;
-    String time = null;
-    Long epochTime = null;
-    String dirNameforHiveHistroy;
-    ArrayList<HiveModel> dbpojoHiveHistoryQuery = new ArrayList<HiveModel>();
-
-    HiveHistoryQueryMigrationImplementation hiveHistoryQueryImpl = new HiveHistoryQueryMigrationImplementation();// creating objects of HiveHistroy implementation
-
-    QuerySet huedatabase = null;
-
-    /*instanciang queryset
-    * according to driver name
-    */
-
-    if (view.getProperties().get("huedrivername").contains("mysql")) {
-      huedatabase = new MysqlQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
-      huedatabase = new PostgressQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
-      huedatabase = new SqliteQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
-      huedatabase = new OracleQuerySet();
-    }
-
-
-    QuerySetAmbariDB ambaridatabase = null;
-
-
-    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-      ambaridatabase = new MysqlQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-      ambaridatabase = new PostgressQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-      ambaridatabase = new OracleQuerySetAmbariDB();
-    }
-
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
-
-      dbpojoHiveHistoryQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb, huedatabase);
-
-      for (int j = 0; j < dbpojoHiveHistoryQuery.size(); j++) {
-        logger.info("the query fetched from hue" + dbpojoHiveHistoryQuery.get(j).getQuery());
-
-      }
-
-		   /* if No migration query selected from Hue Database according to our search criteria */
-
-      if (dbpojoHiveHistoryQuery.size() == 0) {
-        migrationresult.setIsNoQuerySelected("yes");
-        migrationresult.setProgressPercentage(0);
-        migrationresult.setNumberOfQueryTransfered(0);
-        migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
-        getResourceManager(view).update(migrationresult, jobid);
-        logger.info("No queries has been selected acccording to your criteria");
-
-      } else {
-        /* If hive queries are selected based on our search criteria */
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
-        connectionAmbaridb.setAutoCommit(false);
-
-        // for each queries fetched from Hue database//
-
-        for (i = 0; i < dbpojoHiveHistoryQuery.size(); i++) {
-
-          float calc = ((float) (i + 1)) / dbpojoHiveHistoryQuery.size() * 100;
-          int progressPercentage = Math.round(calc);
-          migrationresult.setIsNoQuerySelected("no");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i + 1);
-          migrationresult.setTotalNoQuery(dbpojoHiveHistoryQuery.size());
-          getResourceManager(view).update(migrationresult, jobid);
-
-          logger.info("_____________________");
-          logger.info("Loop No." + (i + 1));
-          logger.info("_____________________");
-          logger.info("Hue query that has been fetched" + dbpojoHiveHistoryQuery.get(i).getQuery());
-          int id = 0;
-
-          id = hiveHistoryQueryImpl.fetchInstanceTablename(connectionAmbaridb, instance, ambaridatabase); // feching table name according to the given instance name
-
-          logger.info("Table name has been fetched from intance name");
-
-          hiveHistoryQueryImpl.writetoFileQueryhql(dbpojoHiveHistoryQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir());// writing to .hql file to a temp file on local disk
-
-          logger.info(".hql file created in Temp directory");
-
-          hiveHistoryQueryImpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());// writing to logs file to a temp file on local disk
-
-          logger.info("Log file created in Temp directory");
-
-          maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(connectionAmbaridb, id, ambaridatabase) + 1);// fetching the maximum count for ambari db to insert
-
-          time = hiveHistoryQueryImpl.getTime();// getting the system current time.
-
-          epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
-
-          dirNameforHiveHistroy = "/user/admin/migration/jobs/migration-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
-
-          logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
-
-          hiveHistoryQueryImpl.insertRowinAmbaridb(dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i, ambaridatabase);// inserting in ambari database
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            logger.info("kerberose enabled");
-            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
-            logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
-            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
-          } else {
-
-            logger.info("kerberose not enabled");
-            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            logger.info("Directory created in hdfs");
-            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
-            hiveHistoryQueryImpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
-          }
-
-        }
-        connectionAmbaridb.commit();
-
-
-      }
-    } catch (SQLException e) {
-      logger.error("Sql exception in ambari database: ", e);
-      try {
-        connectionAmbaridb.rollback();
-        model.setIfSuccess(false);
-        logger.error("Sql statement are Rolledback");
-      } catch (SQLException e1) {
-        logger.error("Sql rollback exception in ambari database",
-          e1);
-      }
-    } catch (ClassNotFoundException e) {
-      logger.error("Class not found :- ", e);
-    } catch (ParseException e) {
-      logger.error("Parse Exception : ", e);
-    } catch (URISyntaxException e) {
-      logger.error("URI Syntax Exception: ", e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: ", e);
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    } finally {
-      if (connectionAmbaridb != null) try {
-        connectionAmbaridb.close();
-      } catch (SQLException e) {
-        logger.error("Exception in closing the connection :", e);
-      }
-    }
-    //deleteing the temprary files that are created while execution
-    hiveHistoryQueryImpl.deleteFileQueryhql(ConfigurationCheckImplementation.getHomeDir());
-    hiveHistoryQueryImpl.deleteFileQueryLogs(ConfigurationCheckImplementation.getHomeDir());
-
-    //session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-    logger.info("------------------------------");
-    logger.info("hive History query Migration Ends");
-    logger.info("------------------------------");
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-    migrationresult.setJobtype("hivehistoryquerymigration");
-    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-    getResourceManager(view).update(migrationresult, jobid);
-
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
deleted file mode 100644
index a0182f6..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryQueryMigrationImplementation.java
+++ /dev/null
@@ -1,551 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset.QuerySet;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-
-import java.io.*;
-import java.net.URISyntaxException;
-import java.security.PrivilegedExceptionAction;
-import java.sql.*;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-
-public class HiveHistoryQueryMigrationImplementation {
-
-  static final Logger logger = Logger.getLogger(HiveHistoryQueryMigrationImplementation.class);
-
-  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
-
-    Date dNow = new Date();
-    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
-    String currentDate = ft.format(dNow);
-
-    XMLOutputter xmlOutput = new XMLOutputter();
-    xmlOutput.setFormat(Format.getPrettyFormat());
-
-    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
-
-    if (xmlfile.exists()) {
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-        Element rootNode = doc.getRootElement();
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-
-      } catch (JDOMException e) {
-        logger.error("JDOMException", e);
-
-      }
-
-    } else {
-
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        doc.getRootElement().addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-      } catch (IOException io) {
-        logger.error("JDOMException", io);
-      }
-
-    }
-
-  }
-
-  public int fetchMaximumIdfromAmbaridb(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-    String ds_id = null;
-    ResultSet rs = null;
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-  }
-
-
-  public void insertRowinAmbaridb(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
-
-    String maxcount1 = Integer.toString(maxcount);
-    String epochtime1 = Long.toString(epochtime);
-    PreparedStatement prSt = null;
-    String revsql = null;
-
-    prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);
-
-    logger.info("The actual insert statement is " + prSt);
-
-    prSt.executeUpdate();
-
-    revsql = ambaridatabase.RevertSql(id, maxcount1);
-
-    logger.info("adding revert sql hive history");
-
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-
-
-  }
-
-  public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-    PreparedStatement prSt = null;
-
-
-    ResultSet rs = null;
-
-
-    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
-
-    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-  }
-
-  public long getEpochTime() throws ParseException {
-    long seconds = System.currentTimeMillis() / 1000l;
-    return seconds;
-
-  }
-
-  public String getTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-
-    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
-      + minute;
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
-      + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-    return s;
-
-  }
-
-  public ArrayList<HiveModel> fetchFromHue(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, SQLException {
-    int id = 0;
-    int i = 0;
-    ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
-
-
-    try {
-      connection.setAutoCommit(false);
-      PreparedStatement prSt = null;
-      Statement statement = connection.createStatement();
-      String query;
-      ResultSet rs;
-
-      ResultSet rs1 = null;
-      if (username.equals("all")) {
-      } else {
-
-
-        prSt = huedatabase.getUseridfromUserName(connection, username);
-
-        rs = prSt.executeQuery();
-
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
-
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
-
-        }
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
-
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
-        }
-      }
-
-      rs1 = prSt.executeQuery();
-
-      while (rs1.next()) {
-        HiveModel hivepojo = new HiveModel();
-        query = rs1.getString("query");
-        hivepojo.setQuery(query);
-        hiveArrayList.add(hivepojo);
-        i++;
-      }
-
-      connection.commit();
-
-    } catch (SQLException e) {
-      connection.rollback();
-
-    } finally {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("Sql exception error: " + e);
-      }
-    }
-    return hiveArrayList;
-
-  }
-
-  public void writetoFileQueryhql(String content, String homedir) {
-    try {
-      File file = new File(homedir + "query.hql");
-      // if file doesnt exists, then create it
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(content);
-      bw.close();
-    } catch (IOException e) {
-      logger.error("IOException", e);
-    }
-
-  }
-
-  public void deleteFileQueryhql(String homedir) {
-    try {
-      File file = new File(homedir + "query.hql");
-
-      if (file.delete()) {
-        logger.info("temporary hql file deleted");
-      } else {
-        logger.info("temporary hql file delete failed");
-      }
-
-    } catch (Exception e) {
-
-      logger.error("File Exception ", e);
-
-    }
-
-  }
-
-  public void deleteFileQueryLogs(String homedir) {
-    try {
-      File file = new File(homedir + "logs");
-
-      if (file.delete()) {
-        logger.info("temporary logs file deleted");
-      } else {
-        logger.info("temporary logs file delete failed");
-      }
-
-    } catch (Exception e) {
-
-      logger.error("File Exception ", e);
-
-    }
-
-  }
-
-  public void writetoFileLogs(String homedir) {
-    try {
-      String content = "";
-      File file = new File(homedir + "logs");
-      // if file doesnt exists, then create it
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(content);
-      bw.close();
-    } catch (IOException e) {
-      logger.error("IOException", e);
-    }
-
-  }
-
-  public void createDir(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      UserGroupInformation.setConfiguration(conf);
-
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-
-        public Boolean run() throws Exception {
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          Boolean b = fs.mkdirs(src);
-          return b;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Exception in Webhdfs", e);
-    }
-  }
-
-  public void createDirKerberorisedSecured(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
-
-        public Boolean run() throws Exception {
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          Boolean b = fs.mkdirs(src);
-          return b;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Exception in Webhdfs", e);
-    }
-  }
-
-
-  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-          );
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class.getName()
-          );
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          //	Path pathsource = new Path(source);
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception", e);
-    }
-
-  }
-
-  public void putFileinHdfsKerborizedSecured(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception", e);
-
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
deleted file mode 100644
index 5228bf6..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/historyquery/HiveHistoryStartJob.java
+++ /dev/null
@@ -1,74 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class HiveHistoryStartJob extends Thread{
-
-  String username;
-  String instance;
-  String startdate;
-  String enddate;
-  String jobid;
-  ViewContext view;
-
-  public HiveHistoryStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
-    this.username = username;
-    this.instance=instance;
-    this.startdate=startdate;
-    this.enddate=enddate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-
-    JSONObject response = new JSONObject();
-
-    /**
-     * creating a separete thread
-     */
-
-    HiveHistoryMigrationUtility hivehistoryquery=new HiveHistoryMigrationUtility();
-    try {
-      hivehistoryquery.hiveHistoryQueryMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
deleted file mode 100644
index 7bd48b2..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationImplementation.java
+++ /dev/null
@@ -1,673 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
-
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.QuerySet;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-import org.json.JSONObject;
-
-import java.io.*;
-import java.net.URISyntaxException;
-import java.nio.charset.Charset;
-import java.security.PrivilegedExceptionAction;
-import java.sql.*;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-
-public class HiveSavedQueryMigrationImplementation {
-
-  static final Logger logger = Logger.getLogger(HiveSavedQueryMigrationImplementation.class);
-
-  private static String readAll(Reader rd) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    int cp;
-    while ((cp = rd.read()) != -1) {
-      sb.append((char) cp);
-    }
-    return sb.toString();
-  }
-
-  public void wrtitetoalternatesqlfile(String dirname, String content,
-                                       String instance, int i) throws IOException {
-
-    Date dNow = new Date();
-    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
-    String currentDate = ft.format(dNow);
-
-    XMLOutputter xmlOutput = new XMLOutputter();
-
-    xmlOutput.setFormat(Format.getPrettyFormat());
-
-    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
-
-    if (xmlfile.exists()) {
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-
-        Element rootNode = doc.getRootElement();
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-
-      } catch (JDOMException e) {
-        // TODO Auto-generated catch block
-        logger.error("JDOMException: ", e);
-      }
-
-    } else {
-
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate
-          .toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        doc.getRootElement().addContent(record);
-
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-
-      } catch (IOException io) {
-
-      }
-
-    }
-
-  }
-
-  public int fetchMaxidforSavedQueryHive(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-    String ds_id = null;
-    ResultSet rs = null;
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.getMaxDsIdFromTableIdSavedquery(c, id);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-  }
-
-  public int fetchInstancetablenameForSavedqueryHive(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-    PreparedStatement prSt = null;
-
-
-    ResultSet rs = null;
-
-
-    prSt = ambaridatabase.getTableIdFromInstanceNameSavedquery(c, instance);
-
-    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-  }
-
-  public int fetchInstanceTablenameHiveHistory(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-    PreparedStatement prSt = null;
-
-
-    ResultSet rs = null;
-
-
-    prSt = ambaridatabase.getTableIdFromInstanceNameHistoryquery(c, instance);
-
-    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-
-  }
-
-  public int fetchMaxdsidFromHiveHistory(Connection c, int id, QuerySetAmbariDB ambaridatabase)
-    throws SQLException {
-
-    String ds_id = null;
-    ResultSet rs = null;
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.getMaxDsIdFromTableIdHistoryquery(c, id);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-  }
-
-
-  /**/
-  public void insertRowHiveHistory(String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase)
-    throws SQLException, IOException {
-
-    String maxcount1 = Integer.toString(maxcount);
-    String epochtime1 = Long.toString(epochtime);
-    PreparedStatement prSt = null;
-    String revsql = null;
-
-    prSt = ambaridatabase.insertToHiveHistory(c, id, maxcount1, epochtime, dirname);
-
-    System.out.println("the actual query is " + prSt);
-
-    logger.info("The actual insert statement is " + prSt);
-
-    prSt.executeUpdate();
-
-    revsql = ambaridatabase.revertSqlHistoryQuery(id, maxcount1);
-
-    logger.info("adding revert sqlsavedquery in hivehistory ");
-
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-  }
-
-  public void insertRowinSavedQuery(int maxcount, String database, String dirname, String query, String name, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
-
-    String maxcount1 = Integer.toString(maxcount);
-    String revsql = null;
-
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.insertToHiveSavedQuery(c, id, maxcount1, database, dirname, query, name);
-
-    System.out.println("the actual query is " + prSt);
-
-    logger.info("The actual insert statement is " + prSt);
-
-    prSt.executeUpdate();
-
-    revsql = ambaridatabase.revertSqlSavedQuery(id, maxcount1);
-
-    logger.info("adding revert sqlsavedquery ");
-
-    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
-
-  }
-
-  public long getEpochTime() throws ParseException {
-
-    long seconds = System.currentTimeMillis() / 1000l;
-    return seconds;
-
-  }
-
-  public String getTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-
-    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
-      + minute;
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
-      + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-
-    return s;
-
-  }
-
-  public ArrayList<HiveModel> fetchFromHuedb(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase)
-    throws ClassNotFoundException, IOException {
-    int id = 0;
-    int i = 0;
-    String[] query = new String[100];
-    ArrayList<HiveModel> hiveArrayList = new ArrayList<HiveModel>();
-    ResultSet rs1 = null;
-
-    try {
-      Statement statement = connection.createStatement();
-      connection.setAutoCommit(false);
-      PreparedStatement prSt = null;
-      ResultSet rs;
-      if (username.equals("all")) {
-      } else {
-
-        prSt = huedatabase.getUseridfromUserName(connection, username);
-
-        rs = prSt.executeQuery();
-
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
-
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
-
-        }
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
-
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
-        }
-
-      }
-
-      rs1 = prSt.executeQuery();
-
-
-      while (rs1.next()) {
-        HiveModel hivepojo = new HiveModel();
-        String name = rs1.getString("name");
-        String temp = rs1.getString("data");
-        InputStream is = new ByteArrayInputStream(temp.getBytes());
-        BufferedReader rd = new BufferedReader(new InputStreamReader(
-          is, Charset.forName("UTF-8")));
-        String jsonText = readAll(rd);
-        JSONObject json = new JSONObject(jsonText);
-        String resources = json.get("query").toString();
-        json = new JSONObject(resources);
-
-        String resarr = (json.get("query")).toString();
-
-        json = new JSONObject(resources);
-        String database = (json.get("database")).toString();
-        hivepojo.setQuery(resarr);
-        hivepojo.setDatabase(database);
-        hivepojo.setOwner(name);
-        hiveArrayList.add(hivepojo);
-        i++;
-      }
-
-    } catch (SQLException e2) {
-      e2.printStackTrace();
-    } finally
-
-    {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("sql connection exception", e);
-      }
-    }
-
-    return hiveArrayList;
-
-  }
-
-
-  public void writetoFilequeryHql(String content, String homedir) {
-    try {
-      File file = new File(homedir + "query.hql");
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(content);
-      bw.close();
-
-    } catch (IOException e) {
-      logger.error("IOException: ", e);
-    }
-
-  }
-
-  public void deleteFileQueryhql(String homedir) {
-    try {
-      File file = new File(homedir + "query.hql");
-
-      if (file.delete()) {
-        logger.info("temporary hql file deleted");
-      } else {
-        logger.info("temporary hql file delete failed");
-      }
-
-    } catch (Exception e) {
-
-      logger.error("File Exception ", e);
-
-    }
-
-  }
-
-  public void deleteFileQueryLogs(String homedir) {
-    try {
-      File file = new File(homedir + "logs");
-
-      if (file.delete()) {
-        logger.info("temporary logs file deleted");
-      } else {
-        logger.info("temporary logs file delete failed");
-      }
-
-    } catch (Exception e) {
-
-      logger.error("File Exception ", e);
-
-    }
-
-  }
-
-
-  public void writetoFileLogs(String homedir) {
-    try {
-
-      String content = "";
-      File file = new File(homedir + "logs");
-
-      // if file doesnt exists, then create it
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(content);
-      bw.close();
-
-    } catch (IOException e) {
-      logger.error("IOException: ", e);
-    }
-
-  }
-
-  public void createDirHive(final String dir, final String namenodeuri)
-    throws IOException, URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs: ", e);
-    }
-  }
-
-  public void createDirHiveSecured(final String dir, final String namenodeuri)
-    throws IOException, URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs: ", e);
-    }
-  }
-
-  public void putFileinHdfs(final String source, final String dest,
-                            final String namenodeuri) throws IOException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystem = FileSystem.get(conf);
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception", e);
-    }
-
-  }
-
-
-  public void putFileinHdfsSecured(final String source, final String dest,
-                                   final String namenodeuri) throws IOException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          // Path pathsource = new Path(source);
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-
-
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception", e);
-    }
-
-  }
-
-}


[03/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java
new file mode 100644
index 0000000..614c171
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigJobImpl.java
@@ -0,0 +1,563 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.pig;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+
+public class PigJobImpl {
+
+  static final Logger logger = Logger.getLogger(PigJobImpl.class);
+
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder sb = new StringBuilder();
+    int cp;
+    while ((cp = rd.read()) != -1) {
+      sb.append((char) cp);
+    }
+    return sb.toString();
+  }
+
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+    XMLOutputter xmlOutput = new XMLOutputter();
+    xmlOutput.setFormat(Format.getPrettyFormat());
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+        Element rootNode = doc.getRootElement();
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (JDOMException e) {
+
+        logger.error("Jdom Exception: " , e);
+      }
+
+
+    } else {
+      // create
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        doc.getRootElement().addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (IOException io) {
+        logger.error("Jdom Exception: " , io);
+      }
+
+    }
+
+  }
+
+  public int fetchMaxIdforPigJob(String driverName, Connection c, int id) throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+    ResultSet rs = null;
+
+    stmt = c.createStatement();
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_PIGJOB_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id);
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+
+    return num;
+
+  }
+
+  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
+
+
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+    stmt = c.createStatement();
+
+    ResultSet rs = null;
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name='" + instance + "';");
+    }
+    while (rs.next()) {
+      id = rs.getInt("id");
+
+    }
+
+    return id;
+  }
+
+  public void insertRowPigJob(String driverName, String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i) throws SQLException, IOException {
+
+    String epochtime1 = Long.toString(epochtime);
+    String ds_id = new String();
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    String sql = "";
+    String revsql = "";
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_PIGJOB_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
+      revsql = "delete from  DS_PIGJOB_" + id + " where ds_id='" + maxcountforpigjob + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "');";
+      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_pigjob_" + id + " values ('" + maxcountforpigjob + "'," + epochtime1 + ",0,'','f','','','admin',0,'" + dirname + "script.pig','','" + maxcountforpigjob + "','','','" + status + "','" + dirname + "','','" + title + "')";
+      revsql = "delete from  ds_pigjob_" + id + " where ds_id='" + maxcountforpigjob + "'";
+
+    }
+
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+    stmt.executeUpdate(sql);
+
+  }
+
+  public long getEpochTime() throws ParseException {
+    int day, month, year;
+    int second, minute, hour;
+    int milisecond;
+    GregorianCalendar date = new GregorianCalendar();
+
+    day = date.get(Calendar.DAY_OF_MONTH);
+    month = date.get(Calendar.MONTH);
+    year = date.get(Calendar.YEAR);
+
+    second = date.get(Calendar.SECOND);
+    minute = date.get(Calendar.MINUTE);
+    hour = date.get(Calendar.HOUR);
+    milisecond = date.get(Calendar.MILLISECOND);
+    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
+    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
+    Date date1 = df.parse(s1);
+    long epoch = date1.getTime();
+    return epoch;
+
+  }
+
+  public String getTime() throws ParseException {
+    int day, month, year;
+    int second, minute, hour;
+    int milisecond;
+    GregorianCalendar date = new GregorianCalendar();
+
+    day = date.get(Calendar.DAY_OF_MONTH);
+    month = date.get(Calendar.MONTH);
+    year = date.get(Calendar.YEAR);
+
+    second = date.get(Calendar.SECOND);
+    minute = date.get(Calendar.MINUTE);
+    hour = date.get(Calendar.HOUR);
+    milisecond = date.get(Calendar.MILLISECOND);
+    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
+    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
+    Date date1 = df.parse(s1);
+    long epoch = date1.getTime();
+    return s;
+
+  }
+
+  public String getTimeInorder() throws ParseException {
+    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
+    Date now = new Date();
+    String strDate = sdfDate.format(now);
+    return strDate;
+  }
+
+  public ArrayList<PojoPig> fetchFromHueDB(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, IOException {
+    int id = 0;
+    int i = 0;
+    String[] query = new String[100];
+    ArrayList<PojoPig> pigjobarraylist = new ArrayList<PojoPig>();
+    try {
+      Statement statement = connection.createStatement();
+      ResultSet rs1 = null;
+      if (username.equals("all")) {
+      } else {
+        ResultSet rs = statement
+          .executeQuery("select id from auth_user where username='"
+            + username + "';");
+        while (rs.next()) {
+
+          id = rs.getInt("id");
+
+        }
+
+      }
+
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job;");
+
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + ";");
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "') AND start_time <= date('" + endtime + "');");
+        }
+
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time >= date('" + startdate + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time >= date('" + startdate + "');");
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where start_time <= date('" + endtime + "');");
+        } else {
+
+          rs1 = statement.executeQuery("select status,start_time,statusdir,script_title,user_id from pig_job where user_id =" + id + " AND start_time <= date('" + endtime + "');");
+        }
+
+      }
+
+      while (rs1.next()) {
+        PojoPig pigjjobobject = new PojoPig();
+
+        int runstatus = rs1.getInt("status");
+
+        if (runstatus == 1) {
+          pigjjobobject.setStatus("RUNNING");
+        } else if (runstatus == 2) {
+          pigjjobobject.setStatus("SUCCEEDED");
+        } else if (runstatus == 3) {
+          pigjjobobject.setStatus("SUBMIT_FAILED");
+        } else if (runstatus == 4) {
+          pigjjobobject.setStatus("KILLED");
+        }
+        String title = rs1.getString("script_title");
+
+
+        pigjjobobject.setTitle(title);
+        String dir = rs1.getString("statusdir");
+        pigjjobobject.setDir(dir);
+        Date created_data = rs1.getDate("start_time");
+        pigjjobobject.setDt(created_data);
+
+        pigjobarraylist.add(pigjjobobject);
+
+        i++;
+      }
+
+
+    } catch (SQLException e) {
+      logger.error("Sqlexception: " , e);
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("Sqlexception in closing the connection: " , e);
+
+      }
+    }
+
+    return pigjobarraylist;
+
+  }
+
+  public void createDirPigJob(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+  }
+
+  /**/
+  public void createDirPigJobSecured(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+  }
+
+  /**/
+  public void copyFileBetweenHdfs(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration confAmbari = new Configuration();
+          confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
+          confAmbari.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
+
+          Configuration confHue = new Configuration();
+          confHue.set("fs.defaultFS", nameNodeuriAmbari);
+          confHue.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystemHue = FileSystem.get(confHue);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path1 = new Path(source);
+          FSDataInputStream in1 = fileSystemHue.open(path1);
+
+          Path path = new Path(dest1);
+          if (fileSystemAmbari.exists(path)) {
+
+          }
+
+          FSDataOutputStream out = fileSystemAmbari.create(path);
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in1.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in1.close();
+          out.close();
+          fileSystemAmbari.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+
+  }
+
+  /**/
+  public void copyFileBetweenHdfsSecured(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
+    throws IOException {
+
+    try {
+
+      final Configuration confAmbari = new Configuration();
+      confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
+      confAmbari.set("hadoop.job.ugi", "hdfs");
+
+      final Configuration confHue = new Configuration();
+      confHue.set("fs.defaultFS", nameNodeuriAmbari);
+      confHue.set("hadoop.job.ugi", "hdfs");
+
+      confAmbari.set("hadoop.security.authentication", "Kerberos");
+      confHue.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+
+          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
+
+          FileSystem fileSystemHue = FileSystem.get(confHue);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path1 = new Path(source);
+          FSDataInputStream in1 = fileSystemHue.open(path1);
+
+          Path path = new Path(dest1);
+          if (fileSystemAmbari.exists(path)) {
+
+          }
+          FSDataOutputStream out = fileSystemAmbari.create(path);
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in1.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in1.close();
+          out.close();
+          fileSystemAmbari.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception: " , e);
+    }
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java
new file mode 100644
index 0000000..e3c668f
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/pig/PigScriptImpl.java
@@ -0,0 +1,600 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.service.pig;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+
+
+/**
+ * Migrates saved Pig scripts from a Hue database into the Ambari Pig view:
+ * reads rows from Hue's pig_pigscript table, inserts matching rows into the
+ * Ambari DS_PIGSCRIPT_&lt;id&gt; table, stages script files locally, and copies
+ * them into HDFS (plain or Kerberos-secured variants).
+ */
+public class PigScriptImpl {
+
+  // NOTE(review): logger is built from PigJobImpl.class, not
+  // PigScriptImpl.class -- looks like a copy/paste slip; log output from this
+  // class will be attributed to PigJobImpl.
+  static final Logger logger = Logger.getLogger(PigJobImpl.class);
+
+  /** Reads the whole Reader into a String, one character at a time. */
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder sb = new StringBuilder();
+    int cp;
+    while ((cp = rd.read()) != -1) {
+      sb.append((char) cp);
+    }
+    return sb.toString();
+  }
+
+  /**
+   * Appends (or creates) an entry in RevertChange.xml recording the revert
+   * SQL for one migrated script, so the migration can be rolled back later.
+   * NOTE(review): the method name has a typo ("wrtite..."); it is kept as-is
+   * because callers depend on it.
+   *
+   * @param dirname  HDFS directory of the migrated script
+   * @param content  revert SQL statement to replay on rollback
+   * @param instance target view instance name
+   * @param i        current record index; stored in the file as id i+1
+   */
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+
+    Date dNow = new Date();
+    // NOTE(review): "YYYY" is the week-based year and "hh" the 12-hour clock
+    // in SimpleDateFormat; "yyyy-MM-dd HH:mm:ss" was almost certainly
+    // intended (around New Year and in the afternoon the stamp is wrong).
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+
+    XMLOutputter xmlOutput = new XMLOutputter();
+
+    xmlOutput.setFormat(Format.getPrettyFormat());
+
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+
+    if (xmlfile.exists()) {
+      // Revert log already exists: parse it and append one more record.
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+
+        Element rootNode = doc.getRootElement();
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (JDOMException e) {
+        // NOTE(review): a parse failure silently drops the revert record.
+        logger.error("JDOMException: " , e);
+      }
+
+
+    } else {
+      // create
+      // First record: build a fresh RevertChangePage document from scratch.
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        doc.getRootElement().addContent(record);
+
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (IOException io) {
+        logger.error("IOException: " , io);
+
+      }
+
+    }
+
+
+  }
+
+  /**
+   * Looks up the viewentity id of the Pig-script entity for the given view
+   * instance. Returns 0 when no row matches; if several rows match, the last
+   * one wins.
+   * NOTE(review): the query is assembled by string concatenation -- SQL
+   * injection risk if 'instance' is user-controlled (a PreparedStatement
+   * would be safer); Statement and ResultSet are never closed.
+   */
+  public int fetchInstanceTablenamePigScript(String driverName, Connection c, String instance) throws SQLException {
+
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+
+    ResultSet rs = null;
+
+    // Oracle rejects a trailing ';' inside a statement, hence the two forms.
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name='" + instance + "';");
+    }
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+
+    }
+
+    return id;
+
+  }
+
+  /**
+   * Returns the current maximum ds_id in ds_pigscript_&lt;id&gt;, or 0 for an
+   * empty table.
+   * NOTE(review): if driverName matches none of postgresql/mysql/oracle, 'rs'
+   * stays null and rs.next() throws an NPE; Statement/ResultSet are not
+   * closed; id is concatenated into the SQL (injection risk if untrusted).
+   */
+  public int fetchmaxIdforPigSavedScript(String driverName, Connection c, int id) throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_PIGSCRIPT_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + "");
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 0;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+
+    return num;
+  }
+
+  /**
+   * Inserts one saved-script row into the Ambari DS_PIGSCRIPT_&lt;id&gt; table
+   * and records the matching delete statement in the revert log.
+   * NOTE(review): parameters maxcountforpigjob, time, time2 and epochtime are
+   * never used (epochtime1 is computed, then discarded); values are
+   * concatenated into the SQL -- injection risk for 'dirname'/'title'.
+   */
+  public void insertRowForPigScript(String driverName, String dirname, int maxcountforpigjob, int maxcount, String time, String time2, long epochtime, String title, Connection c, int id, String instance, int i) throws SQLException, IOException {
+
+    String maxcount1 = Integer.toString(maxcount);
+    String epochtime1 = Long.toString(epochtime);
+    String ds_id = new String();
+    Statement stmt = null;
+    String sql2 = "";
+    String revsql = "";
+
+    stmt = c.createStatement();
+
+    // Per-dialect insert plus its inverse (delete) for the revert log; note
+    // MySQL stores 0 for the boolean column where postgres/oracle store 'f'.
+    if (driverName.contains("mysql")) {
+      sql2 = "INSERT INTO DS_PIGSCRIPT_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00',0,'admin','" + dirname + "','','','" + title + "');";
+      revsql = "delete from  DS_PIGSCRIPT_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "');";
+      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql2 = "INSERT INTO ds_pigscript_" + id + " values ('" + maxcount1 + "','1970-01-17 20:28:55.586000 +00:00:00','f','admin','" + dirname + "','','','" + title + "')";
+      revsql = "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount1 + "'";
+
+    }
+
+    stmt.executeUpdate(sql2);
+
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+  }
+
+
+  /**
+   * Returns "now" as epoch milliseconds, rebuilt by formatting the calendar
+   * fields into a string and re-parsing it.
+   * NOTE(review): Calendar.HOUR is the 12-hour clock (0-11) but the string is
+   * parsed with 'HH' (0-23), so afternoon results are 12 hours early;
+   * System.currentTimeMillis() would be simpler and correct.
+   */
+  public long getEpochTime() throws ParseException {
+    int day, month, year;
+    int second, minute, hour;
+    int milisecond;
+    GregorianCalendar date = new GregorianCalendar();
+
+    day = date.get(Calendar.DAY_OF_MONTH);
+    month = date.get(Calendar.MONTH);
+    year = date.get(Calendar.YEAR);
+
+    second = date.get(Calendar.SECOND);
+    minute = date.get(Calendar.MINUTE);
+    hour = date.get(Calendar.HOUR);
+    milisecond = date.get(Calendar.MILLISECOND);
+
+    // Calendar.MONTH is zero-based, hence month + 1.
+    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
+    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
+    Date date1 = df.parse(s1);
+    long epoch = date1.getTime();
+
+    return epoch;
+
+  }
+
+
+  /**
+   * Returns "now" formatted as "yyyy-M-d_h-m" (12-hour clock, unpadded
+   * fields), used as a file-name fragment.
+   * NOTE(review): the epoch computation below duplicates getEpochTime() and
+   * its result is discarded -- dead code.
+   */
+  public String getTime() throws ParseException {
+    int day, month, year;
+    int second, minute, hour;
+    int milisecond;
+    GregorianCalendar date = new GregorianCalendar();
+
+    day = date.get(Calendar.DAY_OF_MONTH);
+    month = date.get(Calendar.MONTH);
+    year = date.get(Calendar.YEAR);
+
+    second = date.get(Calendar.SECOND);
+    minute = date.get(Calendar.MINUTE);
+    hour = date.get(Calendar.HOUR);
+    milisecond = date.get(Calendar.MILLISECOND);
+
+    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
+    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
+    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
+    Date date1 = df.parse(s1);
+    long epoch = date1.getTime();
+
+    return s;
+
+  }
+
+
+  /**
+   * Returns "now" formatted in the timestamp layout the DS tables use.
+   * NOTE(review): the pattern fragment "msssss" is dubious ('m' means
+   * minute in SimpleDateFormat, not millisecond) -- presumably fractional
+   * seconds were intended; confirm against the stored column format.
+   */
+  public String getTimeInorder() throws ParseException {
+    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
+    Date now = new Date();
+    String strDate = sdfDate.format(now);
+    return strDate;
+  }
+
+
+  /**
+   * Fetches saved Pig scripts from Hue's pig_pigscript table, optionally
+   * filtered by user and by creation-date window (either bound may be the
+   * empty string, meaning unbounded on that side).
+   * NOTE(review): all queries are string-concatenated (SQL injection risk for
+   * 'username'/'startdate'/'endtime'); the caller-supplied Connection is
+   * closed in the finally block, so it is unusable afterwards; 'query' and
+   * the counter 'i' are effectively unused.
+   *
+   * @param username Hue user name, or "all" for every user
+   * @return list of scripts (script text, title, creation date); empty on error
+   */
+  public ArrayList<PojoPig> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, String driverName) throws ClassNotFoundException, IOException {
+    int id = 0;
+    int i = 0;
+    ResultSet rs1 = null;
+    String[] query = new String[100];
+    ArrayList<PojoPig> pigArrayList = new ArrayList<PojoPig>();
+    try {
+
+      Statement statement = connection.createStatement();
+
+      // Resolve the numeric user id unless migrating every user's scripts.
+      // NOTE(review): the "all" branch is intentionally empty.
+      if (username.equals("all")) {
+      } else {
+        ResultSet rs = statement
+          .executeQuery("select id from auth_user where username='"
+            + username + "';");
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+
+      }
+
+
+      // Four date-window cases x (all users | one user) x (postgres boolean
+      // literal 'true' | numeric 1 elsewhere) drive the query choice below.
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=true;");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1;");
+          }
+
+        } else {
+
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + ";");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + ";");
+          }
+
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+          }
+
+        } else {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created >= date('" + startdate + "') AND date_created <= date('" + endtime + "');");
+          }
+
+        }
+
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created >= date('" + startdate + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created >= date('" + startdate + "');");
+          }
+
+        } else {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created >= date('" + startdate + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created >= date('" + startdate + "');");
+          }
+
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND date_created <= date('" + endtime + "');");
+          }
+
+        } else {
+          if (driverName.contains("postgresql")) {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =" + id + " AND date_created <= date('" + endtime + "');");
+
+          } else {
+
+            rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =" + id + " AND date_created <= date('" + endtime + "');");
+          }
+
+        }
+
+      }
+      // rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id ="+id+" AND date_created BETWEEN '"+ startdate +"' AND '"  +endtime +"';");
+      // Materialize each row into a PojoPig transfer object.
+      while (rs1.next()) {
+        PojoPig pojopig = new PojoPig();
+
+        String script = rs1.getString("pig_script");
+        String title = rs1.getString("title");
+        Date created_data = rs1.getDate("date_created");
+        pojopig.setDt(created_data);
+        pojopig.setScript(script);
+        pojopig.setTitle(title);
+
+        pigArrayList.add(pojopig);
+        i++;
+      }
+
+
+    } catch (SQLException e) {
+      logger.error("SQLException" , e);
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("SQLException" , e);
+      }
+    }
+
+    return pigArrayList;
+
+  }
+
+  /**
+   * Writes the script text to homedir + filename2 on the local filesystem
+   * (created if absent, truncated otherwise).
+   * NOTE(review): 'title' and 'createddate' are unused; the writer is not
+   * closed in a finally block, so an exception mid-write leaks it.
+   */
+  public void writetPigScripttoLocalFile(String script, String title, Date createddate, String homedir, String filename2) {
+    try {
+      logger.info(homedir + filename2);
+      File file = new File(homedir + filename2);
+
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(script);
+      bw.close();
+
+
+    } catch (IOException e) {
+
+      logger.error("IOException" , e);
+    }
+
+  }
+
+  /**
+   * Deletes the temporary local script file homedir + filename2; failure is
+   * only logged.
+   * NOTE(review): "Temproray" typo in the log messages -- left untouched here
+   * since they are runtime strings.
+   */
+  public void deletePigScriptLocalFile(String homedir, String filename2) {
+    try{
+
+      File file = new File(homedir + filename2);
+
+      if(file.delete()){
+        logger.info("Temproray file deleted");
+      }else{
+        logger.info("Temproray file delete failed");
+      }
+
+    }catch(Exception e){
+
+     logger.error("File Exception: ",e);
+
+    }
+
+  }
+
+  /**
+   * Copies a local file into HDFS (unsecured cluster), running as remote user
+   * "hdfs". The destination is dest + "/" + basename(source) (slash added
+   * only when dest lacks a trailing one).
+   * NOTE(review): the exists(path) check has an empty body -- presumably a
+   * delete/skip was intended, currently a no-op before create() overwrites;
+   * all exceptions are swallowed after logging, so callers cannot detect
+   * failure; the input stream is not closed if the copy loop throws.
+   */
+  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          if (fileSystem.exists(path)) {
+
+          }
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          // Manual 1 KiB copy loop from local file to HDFS.
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs Exception: " , e);
+    }
+
+  }
+
+  /**
+   * Kerberos-secured variant of {@link #putFileinHdfs}: sets
+   * hadoop.security.authentication=Kerberos on the Configuration before
+   * creating the remote "hdfs" user and performing the same copy.
+   * NOTE(review): same issues as putFileinHdfs -- empty exists() body,
+   * swallowed exceptions, stream not closed on error; also no keytab login is
+   * performed here, so the Kerberos credentials must come from the process
+   * environment (TODO confirm).
+   */
+  public void putFileinHdfsSecured(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          if (fileSystem.exists(path)) {
+
+          }
+          //	Path pathsource = new Path(source);
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs Exception: " , e);
+
+    }
+
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java
deleted file mode 100644
index 3edacb2..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/BadRequestFormattedException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-public class BadRequestFormattedException extends ServiceFormattedException {
-  private final static int STATUS = 400;
-
-  public BadRequestFormattedException(String message, Throwable exception) {
-    super(message, exception, STATUS);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java
deleted file mode 100644
index 64a406d..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/FilePaginator.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.hadoop.fs.FSDataInputStream;
-
-import java.io.IOException;
-import java.nio.charset.Charset;
-import java.util.Arrays;
-
-import static java.lang.Math.ceil;
-
-/**
- * Pagination for HDFS file implementation
- */
-public class FilePaginator {
-  private static int PAGE_SIZE = 1*1024*1024;  // 1MB
-
-  private String filePath;
-  private ViewContext context;
-
-  /**
-   * Constructor
-   * @param filePath Path to file on HDFS
-   * @param context View Context instance
-   */
-  public FilePaginator(String filePath, ViewContext context) {
-    this.filePath = filePath;
-    this.context = context;
-  }
-
-  /**
-   * Set page size
-   * @param PAGE_SIZE size
-   */
-  public static void setPageSize(int PAGE_SIZE) {
-    FilePaginator.PAGE_SIZE = PAGE_SIZE;
-  }
-
-  /**
-   * Get page count
-   * @return page count
-   * @throws IOException
-   * @throws InterruptedException
-   */
-
-  /**
-   * Read one page of size PAGE_SIZE
-   * @param page page index
-   * @return data in UTF-8
-   * @throws IOException
-   * @throws InterruptedException
-   */
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java
deleted file mode 100644
index dad03ec..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/MisconfigurationFormattedException.java
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.util.HashMap;
-
-public class MisconfigurationFormattedException extends WebApplicationException {
-  private final static int STATUS = 500;
-  private final static String message = "Parameter \"%s\" is set to null";
-  private final static Logger LOG =
-      LoggerFactory.getLogger(MisconfigurationFormattedException.class);
-
-  public MisconfigurationFormattedException(String name) {
-    super(errorEntity(name));
-  }
-
-  protected static Response errorEntity(String name) {
-    HashMap<String, Object> response = new HashMap<String, Object>();
-    response.put("message", String.format(message, name));
-    response.put("trace", null);
-    response.put("status", STATUS);
-    return Response.status(STATUS).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON).build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java
deleted file mode 100644
index 00ab049..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/NotFoundFormattedException.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-public class NotFoundFormattedException extends ServiceFormattedException {
-  private final static int STATUS = 404;
-
-  public NotFoundFormattedException(String message, Throwable exception) {
-    super(message, exception, STATUS);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java
deleted file mode 100644
index c49a18c..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/utils/ServiceFormattedException.java
+++ /dev/null
@@ -1,101 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.utils;
-
-import org.json.simple.JSONObject;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.PrintWriter;
-import java.io.StringWriter;
-import java.security.AccessControlException;
-import java.util.HashMap;
-
-public class ServiceFormattedException extends WebApplicationException {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(ServiceFormattedException.class);
-
-  public ServiceFormattedException(Throwable e) {
-    super(errorEntity(null, e, suggestStatus(e)));
-  }
-
-  public ServiceFormattedException(String message) {
-    super(errorEntity(message, null, suggestStatus(null)));
-  }
-
-  public ServiceFormattedException(String message, Throwable exception) {
-    super(errorEntity(message, exception, suggestStatus(exception)));
-  }
-
-  public ServiceFormattedException(String message, Throwable exception, int status) {
-    super(errorEntity(message, exception, status));
-  }
-
-  private static int suggestStatus(Throwable exception) {
-    int status = 500;
-    if (exception == null) {
-      return status;
-    }
-    if (exception instanceof AccessControlException) {
-      status = 403;
-    }
-    return status;
-  }
-
-  protected static Response errorEntity(String message, Throwable e, int status) {
-    HashMap<String, Object> response = new HashMap<String, Object>();
-
-    String trace = null;
-
-    response.put("message", message);
-    if (e != null) {
-      trace = e.toString() + "\n\n";
-      StringWriter sw = new StringWriter();
-      e.printStackTrace(new PrintWriter(sw));
-      trace += sw.toString();
-
-      if (message == null) {
-        String innerMessage = e.getMessage();
-        String autoMessage;
-
-        if (innerMessage != null) {
-          autoMessage = String.format("%s [%s]", innerMessage, e.getClass().getSimpleName());
-        } else {
-          autoMessage = e.getClass().getSimpleName();
-        }
-        response.put("message", autoMessage);
-      }
-    }
-    response.put("trace", trace);
-    response.put("status", status);
-
-    if(message != null) {
-      LOG.error(message);
-    }
-    if(trace != null) {
-      LOG.error(trace);
-    }
-
-    Response.ResponseBuilder responseBuilder = Response.status(status).entity(new JSONObject(response)).type(MediaType.APPLICATION_JSON);
-    return responseBuilder.build();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml b/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml
new file mode 100644
index 0000000..8cca06c
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/WEB-INF/web.xml
@@ -0,0 +1,123 @@
+<?xml version="1.0" encoding="ISO-8859-1" ?>
+
+<!--
+Licensed to the Apache Software Foundation (ASF) under one or more
+contributor license agreements. See the NOTICE file distributed with
+this work for additional information regarding copyright ownership.
+The ASF licenses this file to You under the Apache License, Version 2.0
+(the "License"); you may not use this file except in compliance with
+the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License. Kerberos, LDAP, Custom. Binary/Htt
+-->
+
+<web-app xmlns="http://java.sun.com/xml/ns/j2ee"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://java.sun.com/xml/ns/j2ee http://java.sun.com/xml/ns/j2ee/web-app_2_4.xsd"
+         version="2.4">
+
+  <display-name>Hello Servlet Application</display-name>
+  <welcome-file-list>
+    <welcome-file>index.jsp</welcome-file>
+  </welcome-file-list>
+  <description>
+    This is the hello servlet view application.
+  </description>
+
+  <servlet>
+    <servlet-name>HiveHistory</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.hive.HiveHistoryMigration</servlet-class>
+    <load-on-startup>1</load-on-startup>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>
+      SavedQuery
+    </display-name>
+    <servlet-name>SavedQuery</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.hive.HiveSavedQueryMigration</servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>PigServlet</display-name>
+    <servlet-name>PigServlet</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.pig.PigScriptMigration</servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>Configuration_check</display-name>
+    <servlet-name>Configuration_check</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck
+    </servlet-class>
+  </servlet>
+
+  <servlet>
+    <description>
+    </description>
+    <display-name>Pigjobsevlet</display-name>
+    <servlet-name>Pigjobsevlet</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.pig.PigJobMigration</servlet-class>
+  </servlet>
+
+  <servlet-mapping>
+    <servlet-name>HiveHistory</servlet-name>
+    <url-pattern>/HiveHistory</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>SavedQuery</servlet-name>
+    <url-pattern>/SavedQuery</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>PigServlet</servlet-name>
+    <url-pattern>/PigServlet</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>Configuration_check</servlet-name>
+    <url-pattern>/Configuration_check</url-pattern>
+  </servlet-mapping>
+
+  <servlet-mapping>
+    <servlet-name>Pigjobsevlet</servlet-name>
+    <url-pattern>/Pigjobsevlet</url-pattern>
+  </servlet-mapping>
+
+  <servlet>
+    <description></description>
+    <display-name>RevertChange</display-name>
+    <servlet-name>RevertChange</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.revertchange.RevertChange</servlet-class>
+  </servlet>
+
+  <servlet-mapping>
+    <servlet-name>RevertChange</servlet-name>
+    <url-pattern>/RevertChange</url-pattern>
+  </servlet-mapping>
+
+  <servlet>
+    <description></description>
+    <display-name>ProgressBarStatus</display-name>
+    <servlet-name>ProgressBarStatus</servlet-name>
+    <servlet-class>org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus
+    </servlet-class>
+  </servlet>
+  <servlet-mapping>
+    <servlet-name>ProgressBarStatus</servlet-name>
+    <url-pattern>/ProgressBarStatus</url-pattern>
+  </servlet-mapping>
+
+</web-app>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/index.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/index.jsp b/contrib/views/hueambarimigration/src/main/resources/index.jsp
new file mode 100644
index 0000000..0ff1f36
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/index.jsp
@@ -0,0 +1,119 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<html>
+<head>
+<title>Hue to Ambari Migration</title>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<!-- Bootstrap CSS and bootstrap datepicker CSS used for styling the demo pages-->
+
+<link rel="stylesheet" href="css/bootstrap.css">
+
+
+
+
+<script src="js/jquery.js"></script>
+<script src="js/bootstrap.min.js"></script>
+
+
+
+
+
+<script type="text/javascript">
+	$(function() {
+		home();
+	});
+	function makeTabActive(tab) {
+		if (!tab) {
+			return;
+		}
+		$(".nav-tab").removeClass('active');
+		$(tab).parents('.nav-tab').addClass('active');
+	}
+	function loadconfiguration(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/checkconfiguration.jsp');
+	}
+	function revertchange(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/revertchange.jsp');
+	}
+	function home(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/homepage.jsp');
+	}
+	function loadhivehistory(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/hivehistoryquerymigration.jsp');
+	}
+	function loadpigscript(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/pigscriptsmigration.jsp');
+	}
+	function loadpigjobs(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/pigjobmigration.jsp');
+	}
+	function loadhivesaved(tab) {
+		makeTabActive(tab);
+		$('#maincenter11').load('ui/hivesavedquerymigration.jsp');
+	}
+</script>
+
+
+</head>
+
+<div class="container">
+	<!-- <div class="jumbotron" style="margin:10px">
+    <h1>Hue to Ambari Migration</h1>        
+  </div> -->
+
+
+
+<div class="row">
+	<nav class="navbar navbar-default">
+		<div class="container-fluid">
+			<ul class="nav navbar-nav">
+				<li class="nav-tab active"><a onclick="home(this)">Home</a></li>
+				<li class="nav-tab"><a onclick="loadconfiguration(this)">Check
+						configuration</a></li>
+				<li class="dropdown nav-tab"><a class="dropdown-toggle"
+					data-toggle="dropdown" href="#">Hive <span class="caret"></span></a>
+					<ul class="dropdown-menu">
+						<li><span onclick="loadhivesaved(this)">HiveSaved Query</span></li>
+						<li><span onclick="loadhivehistory(this)">HiveHistory</span></li>
+					</ul></li>
+				<li class="dropdown nav-tab"><a class="dropdown-toggle"
+					data-toggle="dropdown" href="#">Pig <span class="caret"></span></a>
+					<ul class="dropdown-menu">
+						<li><span onclick="loadpigscript(this)">Pigsavedscript</span></li>
+						<li><span onclick="loadpigjobs(this)">Pigjobs</span></li>
+					</ul></li>
+				<li class="nav-tab"><a onclick="revertchange(this)">Revert
+						the changes Page</a></li>
+			</ul>
+		</div>
+	</nav>
+</div>
+<div>
+	<div class="col-lg-2 main"></div>
+	<div class="col-lg-8 main">
+		<div id="maincenter11"></div>
+	</div>
+</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore b/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore
new file mode 100644
index 0000000..29aa6db
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/.gitignore
@@ -0,0 +1,33 @@
+# See http://help.github.com/ignore-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/bower_components
+node/
+
+# misc
+
+/.idea
+
+# Numerous always-ignore extensions
+*.diff
+*.err
+*.orig
+*.log
+*.rej
+*.swo
+*.swp
+*.vi
+*~
+*.sass-cache
+
+# OS or Editor folders
+.DS_Store
+.cache
+.project
+.settings
+.tmproj
+dist
+nbproject
+Thumbs.db
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/bower.json
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/bower.json b/contrib/views/hueambarimigration/src/main/resources/ui/bower.json
new file mode 100644
index 0000000..ebbf28c
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/bower.json
@@ -0,0 +1,15 @@
+{
+  "name": "huetoambari",
+  "homepage": "https://github.com/apache/ambari",
+  "authors": [
+    "pradarttana"
+  ],
+  "description": "",
+  "main": "",
+  "license": "MIT",
+  "private": true,
+  "dependencies": {
+    "bootstrap": "^3.3.6",
+    "eonasdan-bootstrap-datetimepicker": "^4.17.37"
+  }
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp
new file mode 100644
index 0000000..b60ff41
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/checkconfiguration.jsp
@@ -0,0 +1,57 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+
+<script type="text/javascript">
+	$(document).ready(function() {
+		// we call the function
+		conf_check();
+	});
+	function conf_check() {
+		var url = "Configuration_check";
+
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("areacenter").innerHTML = result;
+
+			}
+		});
+  }
+</script>	
+
+</head>
+<div class="panel panel-default">
+	<div class="panel-heading">
+		<h3>Checking configuration</h3>
+	</div>
+	<div class="panel-body">
+		<div id="areacenter">
+			<center>
+				<img src="image/updateimg.gif" alt="Smiley face">
+			</center>
+		</div>
+	</div>
+</div>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp
new file mode 100644
index 0000000..3de2fdf
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hivehistoryquerymigration.jsp
@@ -0,0 +1,229 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<script type="text/javascript">
+
+	function validateAndSearch() {
+
+		var uname = document.getElementById("username");
+		uname = uname.options[uname.selectedIndex].value;
+		var startdate = document.getElementById('startdate').value;
+		var enddate = document.getElementById('enddate').value;
+		var instance = document.getElementById("instance");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+
+			historyquery(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+		}
+	}
+
+	function loadpercentage() {
+		$.ajax({
+      url : "ProgressBarStatus",
+      success : function(result) {
+        $('#progressbarhivesavedquery').css('width', result);
+        console.log("Got the precentage completion "+ result);
+      },
+    });
+
+  }
+
+	function historyquery(uname, startdate, enddate, instance) {
+
+		var url = "HiveHistory?username=" + uname + "&startdate=" + startdate
+        				+ "&enddate=" + enddate + "&instance=" + instance;
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				$('#progressbar').hide()
+				$('#lines').show()
+				clearInterval(interval);
+
+			}
+		});
+
+
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<div class="row">
+	<%
+		ArrayList<String> username = new ArrayList<String>();
+		ArrayList<String> instancename = new ArrayList<String>();
+		int i;
+
+		Connection conn = null;
+
+		ServletContext context = request.getServletContext();
+        ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+        System.out.println(view.getProperties());
+
+		conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+
+		Statement stat = conn.createStatement();
+
+		ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+		while (rs.next()) {
+			username.add(rs.getString(2));
+		}
+
+		rs.close();
+
+		Connection c = null;
+		Statement stmt = null;
+
+		c = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+		c.setAutoCommit(false);
+		stmt = c.createStatement();
+
+		ResultSet rs1=null;
+
+		if(view.getProperties().get("ambaridrivername").contains("oracle")){
+      rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'");
+    } else {
+      rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';");
+    }
+
+		while (rs1.next()) {
+			instancename.add(rs1.getString(1));
+
+		}
+		rs1.close();
+		stmt.close();
+
+	%>
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Hive History Query Migration</h3>
+				</div>
+				<div class="panel-body">
+
+					<div class="row">
+						<div class="col-sm-3">
+							UserName <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username1234(*)" name="username1" id="username1"> -->
+							<select class="form-control" name="username"
+								placeholder="User name" id="username" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance" id="instance"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate"
+								id="startdate">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate"
+								id="enddate">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="validateAndSearch()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+					<br>
+					<br>
+					   <div class="progress" id="progressbar" style="display: none;">
+              <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+              </div>
+              </div>
+				</div>
+		</form>
+
+	</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp
new file mode 100644
index 0000000..c70751d
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/hivesavedquerymigration.jsp
@@ -0,0 +1,240 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<!DOCTYPE html>
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<!-- Bootstrap CSS and bootstrap datepicker CSS used for styling the demo pages-->
+
+
+
+
+ <script type="text/javascript">
+
+	function validateAndSearch() {
+
+		var uname = document.getElementById("username");
+		uname = uname.options[uname.selectedIndex].value;
+		var startdate = document.getElementById('startdate').value;
+		var enddate = document.getElementById('enddate').value;
+		var instance = document.getElementById("instance");
+		instance = instance.options[instance.selectedIndex].value;
+
+		if (uname == "default") {
+			alert("Please select an username");
+		} else if (instance == "default") {
+			alert("Please select an instance name");
+		} else {
+			$('#progressbar').show();
+			$('#lines').hide();
+
+			historyquery(uname, startdate, enddate, instance);
+			interval = setInterval(loadpercentage, 1000 );
+
+		}
+
+	}
+
+	function loadpercentage() {
+      $.ajax({
+         url : "ProgressBarStatus",
+         success : function(result) {
+         $('#progressbarhivesavedquery').css('width', result);
+         console.log("Got the precentage completion "+ result);
+        },
+
+      });
+
+  }
+
+
+
+	function historyquery(uname, startdate, enddate, instance) {
+
+		var url = "SavedQuery?username=" + uname + "&startdate=" + startdate
+        				+ "&enddate=" + enddate + "&instance=" + instance;
+		$.ajax({
+			url : url,
+			success : function(result) {
+				console.log("Got Result");
+				document.getElementById("lines").innerHTML = result;
+				$('#progressbar').hide()
+				$('#lines').show()
+				clearInterval(interval);
+
+			}
+		});
+
+
+
+	}
+</script>
+<%@ page import="java.sql.*"%>
+<%@ page import="org.sqlite.*"%>
+<%@ page import="java.util.ArrayList"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase"%>
+<%@ page import="org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase"%>
+<%@ page import="javax.servlet.ServletConfig"%>
+<%@ page import="javax.servlet.ServletContext"%>
+<%@ page import="org.apache.ambari.view.ViewContext"%>
+</head>
+<div class="row">
+	<%
+		ArrayList<String> username = new ArrayList<String>();
+		ArrayList<String> instancename = new ArrayList<String>();
+		int i;
+
+		Connection conn = null;
+
+		ServletContext context = request.getServletContext();
+        ViewContext view=(ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+        System.out.println(view.getProperties());
+
+		conn = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"),view.getProperties().get("huejdbcurl"),view.getProperties().get("huedbusername"),view.getProperties().get("huedbpassword")).getConnection();
+
+		Statement stat = conn.createStatement();
+
+		ResultSet rs = stat.executeQuery("select * from auth_user;");
+
+		while (rs.next()) {
+			username.add(rs.getString(2));
+		}
+
+		rs.close();
+
+		Connection c = null;
+		Statement stmt = null;
+
+		c = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"),view.getProperties().get("ambarijdbcurl"),view.getProperties().get("ambaridbusername"),view.getProperties().get("ambaridbpassword")).getConnection();
+
+		c.setAutoCommit(false);
+		stmt = c.createStatement();
+
+		ResultSet rs1=null;
+
+		if(view.getProperties().get("ambaridrivername").contains("oracle")){
+		  rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'");
+		} else {
+		  rs1 = stmt.executeQuery("select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';");
+		}
+    while (rs1.next()) {
+			instancename.add(rs1.getString(1));
+
+		}
+		rs1.close();
+		stmt.close();
+
+	%>
+	<div class="col-sm-12">
+		<form method="GET" onSubmit="validateAndSearch()">
+			<div class="panel panel-default">
+				<div class="panel-heading">
+					<h3>Hive Saved Query Migration</h3>
+				</div>
+				<div class="panel-body">
+
+					<div class="row">
+						<div class="col-sm-3">
+							UserName <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter username1234(*)" name="username1" id="username1"> -->
+							<select class="form-control" name="username"
+								placeholder="User name" id="username" required>
+								<option value="default" selected>Select below</option>
+								<option value="all">ALL User</option>
+
+								<%
+									for (i = 0; i < username.size(); i++) {
+								%><option value="<%=username.get(i)%>"><%=username.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									username.clear();
+								%>
+							</select>
+
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">
+							Instance name <font size="3" color="red"> *</font>
+						</div>
+						<div class="col-sm-3">
+							<!-- <input type="text" placeholder="Enter Instance Name(*)" name="instance" id="instance"> -->
+							<select class="form-control" name="instance"
+								placeholder="Instance name" id="instance" required>
+								<option value="default" selected>Select below</option>
+
+								<%
+									for (i = 0; i < instancename.size(); i++) {
+								%><option value="<%=instancename.get(i)%>"><%=instancename.get(i)%></option>
+								<%
+									}
+								%>
+								<%
+									instancename.clear();
+								%>
+							</select>
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">Start Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="startdate"
+								id="startdate">
+						</div>
+					</div>
+					<p></p>
+					<p></p>
+					<div class="row">
+						<div class="col-sm-3">End Date</div>
+						<div class="col-sm-3">
+							<input type="date" placeholder="Enter date" name="enddate"
+								id="enddate">
+						</div>
+					</div>
+
+					<div class="row">
+
+						<div class="col-sm-3">
+							<input type="button" id="submit" class="btn btn-success"
+								value="submit" onclick="validateAndSearch()">
+						</div>
+					</div>
+
+					<div id="lines" style="display: none;"></div>
+
+					<br>
+					<br>
+					   <div class="progress" id="progressbar" style="display: none;">
+              <div id="progressbarhivesavedquery" class="progress-bar" role="progressbar" aria-valuenow="70" aria-valuemin="0" aria-valuemax="100"  style="width:0%">
+          </div>
+        </div>
+
+				</div>
+		</form>
+
+	</div>
+</div>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp b/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp
new file mode 100644
index 0000000..69aadac
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/resources/ui/homepage.jsp
@@ -0,0 +1,31 @@
+<!--
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+-->
+
+<%@ page language="java" contentType="text/html; charset=US-ASCII"
+	pageEncoding="US-ASCII"%>
+<!DOCTYPE html PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=US-ASCII">
+
+</head>
+<body>
+
+
+</body>
+</html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc
deleted file mode 100644
index 959e169..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.bowerrc
+++ /dev/null
@@ -1,4 +0,0 @@
-{
-  "directory": "bower_components",
-  "analytics": false
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig
deleted file mode 100644
index 47c5438..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.editorconfig
+++ /dev/null
@@ -1,34 +0,0 @@
-# EditorConfig helps developers define and maintain consistent
-# coding styles between different editors and IDEs
-# editorconfig.org
-
-root = true
-
-
-[*]
-end_of_line = lf
-charset = utf-8
-trim_trailing_whitespace = true
-insert_final_newline = true
-indent_style = space
-indent_size = 2
-
-[*.js]
-indent_style = space
-indent_size = 2
-
-[*.hbs]
-insert_final_newline = false
-indent_style = space
-indent_size = 2
-
-[*.css]
-indent_style = space
-indent_size = 2
-
-[*.html]
-indent_style = space
-indent_size = 2
-
-[*.{diff,md}]
-trim_trailing_whitespace = false

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli
deleted file mode 100644
index 427f570..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.ember-cli
+++ /dev/null
@@ -1,10 +0,0 @@
-
-{
-  /**
-    Ember CLI sends analytics information by default. The data is completely
-    anonymous, but there are times when you might want to disable this behavior.
-
-    Setting `disableAnalytics` to true will prevent any data from being sent.
-  */
-  "disableAnalytics": false
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore
deleted file mode 100644
index f7245d9..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.gitignore
+++ /dev/null
@@ -1,44 +0,0 @@
-# See http://help.github.com/ignore-files/ for more about ignoring files.
-
-# compiled output
-/dist
-/tmp
-
-# NPM packages folder.
-
-node_modules/
-bower_components/
-node/
-
-# misc
-/.sass-cache
-/connect.lock
-/coverage/*
-/libpeerconnection.log
-npm-debug.log
-testem.log
-
-/.idea
-
-# Numerous always-ignore extensions
-*.diff
-*.err
-*.orig
-*.log
-*.rej
-*.swo
-*.swp
-*.vi
-*~
-*.sass-cache
-
-# OS or Editor folders
-.DS_Store
-.cache
-.project
-.settings
-.tmproj
-dist
-nbproject
-Thumbs.db
-


[11/11] ambari git commit: AMBARI-17476. ServiceAccounts page not available in ambari (alexantonenko)

Posted by al...@apache.org.
AMBARI-17476. ServiceAccounts page not available in ambari (alexantonenko)


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/6b6ce800
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/6b6ce800
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/6b6ce800

Branch: refs/heads/branch-2.4
Commit: 6b6ce800e48a6ac0ae8c10edc36d6076e64497c9
Parents: f403a36
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Jun 29 15:49:55 2016 +0300
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Wed Jun 29 17:36:51 2016 +0300

----------------------------------------------------------------------
 .../main/resources/ui/admin-web/app/scripts/services/Cluster.js  | 2 +-
 ambari-web/app/routes/main.js                                    | 4 ++--
 ambari-web/app/templates/application.hbs                         | 4 ++--
 ambari-web/app/views/main/admin.js                               | 2 +-
 ambari-web/app/views/main/menu.js                                | 4 ++--
 ambari-web/test/app_test.js                                      | 2 +-
 6 files changed, 9 insertions(+), 9 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
----------------------------------------------------------------------
diff --git a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
index 3160cd0..36baeb5 100644
--- a/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
+++ b/ambari-admin/src/main/resources/ui/admin-web/app/scripts/services/Cluster.js
@@ -76,7 +76,7 @@ angular.module('ambariAdminConsole')
       "AMBARI.MANAGE_USERS",
       "AMBARI.MANAGE_VIEWS",
       "AMBARI.RENAME_CLUSTER",
-      "AMBARI.SET_SERVICE_USERS_GROUPS"
+      "SERVICE.SET_SERVICE_USERS_GROUPS"
     ],
 
     orderedLevels: ['SERVICE', 'HOST', 'CLUSTER', 'AMBARI'],

http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-web/app/routes/main.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/routes/main.js b/ambari-web/app/routes/main.js
index 4545f54..ae16e27 100644
--- a/ambari-web/app/routes/main.js
+++ b/ambari-web/app/routes/main.js
@@ -348,7 +348,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
   admin: Em.Route.extend({
     route: '/admin',
     enter: function (router, transition) {
-      if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, AMBARI.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS')
+      if (router.get('loggedIn') && !App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, SERVICE.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS')
         && !(App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
         Em.run.next(function () {
           router.transitionTo('main.dashboard.index');
@@ -527,7 +527,7 @@ module.exports = Em.Route.extend(App.RouterRedirections, {
     adminServiceAccounts: Em.Route.extend({
       route: '/serviceAccounts',
       enter: function (router, transition) {
-        if (router.get('loggedIn') && !App.isAuthorized('AMBARI.SET_SERVICE_USERS_GROUPS')) {
+        if (router.get('loggedIn') && !App.isAuthorized('SERVICE.SET_SERVICE_USERS_GROUPS')) {
           router.transitionTo('main.dashboard.index');
         }
       },

http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-web/app/templates/application.hbs
----------------------------------------------------------------------
diff --git a/ambari-web/app/templates/application.hbs b/ambari-web/app/templates/application.hbs
index ce78c89..47be986 100644
--- a/ambari-web/app/templates/application.hbs
+++ b/ambari-web/app/templates/application.hbs
@@ -65,7 +65,7 @@
                 <li><a href="" id="about" {{action showAboutPopup target="controller"}}>{{t app.aboutAmbari}}</a></li>
                 {{#if App.router.clusterInstallCompleted}}
                   {{#if isClusterDataLoaded}}
-                    {{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, AMBARI.SET_SERVICE_USERS_GROUPS"}}
+                    {{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, SERVICE.SET_SERVICE_USERS_GROUPS"}}
                       <li><a href=""
                              id="manage-ambari" {{action goToAdminView target="controller"}}>{{t app.manageAmbari}}</a>
                       </li>
@@ -73,7 +73,7 @@
                   {{/if}}
                 {{else}}
                   {{#if App.isPermissionDataLoaded}}
-                    {{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, AMBARI.SET_SERVICE_USERS_GROUPS"}}
+                    {{#isAuthorized "AMBARI.ADD_DELETE_CLUSTERS, AMBARI.ASSIGN_ROLES, AMBARI.EDIT_STACK_REPOS, AMBARI.MANAGE_GROUPS, AMBARI.MANAGE_STACK_VERSIONS, AMBARI.MANAGE_USERS, AMBARI.MANAGE_VIEWS, AMBARI.RENAME_CLUSTER, SERVICE.SET_SERVICE_USERS_GROUPS"}}
                       <li><a href=""
                              id="manage-ambari" {{action goToAdminView target="controller"}}>{{t app.manageAmbari}}</a>
                       </li>

http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-web/app/views/main/admin.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/admin.js b/ambari-web/app/views/main/admin.js
index ebe97cf..704527b 100644
--- a/ambari-web/app/views/main/admin.js
+++ b/ambari-web/app/views/main/admin.js
@@ -30,7 +30,7 @@ App.MainAdminView = Em.View.extend({
         label: Em.I18n.t('admin.stackUpgrade.title')
       });
     }
-    if(App.isAuthorized('AMBARI.SET_SERVICE_USERS_GROUPS') || (App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
+    if(App.isAuthorized('SERVICE.SET_SERVICE_USERS_GROUPS') || (App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
       items.push({
         name: 'adminServiceAccounts',
         url: 'adminServiceAccounts',

http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-web/app/views/main/menu.js
----------------------------------------------------------------------
diff --git a/ambari-web/app/views/main/menu.js b/ambari-web/app/views/main/menu.js
index c6feafe..f60ae8a 100644
--- a/ambari-web/app/views/main/menu.js
+++ b/ambari-web/app/views/main/menu.js
@@ -43,7 +43,7 @@ App.MainMenuView = Em.CollectionView.extend({
               {label: Em.I18n.t('menu.item.alerts'), routing: 'alerts'}
           );
         }
-        if (App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, CLUSTER.MODIFY_CONFIGS, SERVICE.START_STOP, AMBARI.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS')
+        if (App.isAuthorized('CLUSTER.TOGGLE_KERBEROS, CLUSTER.MODIFY_CONFIGS, SERVICE.START_STOP, SERVICE.SET_SERVICE_USERS_GROUPS, CLUSTER.UPGRADE_DOWNGRADE_STACK, CLUSTER.VIEW_STACK_DETAILS')
           || (App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
           result.push({ label: Em.I18n.t('menu.item.admin'), routing: 'admin'});
         }
@@ -112,7 +112,7 @@ App.MainMenuView = Em.CollectionView.extend({
             label: Em.I18n.t('admin.stackUpgrade.title')
           });
         }
-        if(App.isAuthorized('AMBARI.SET_SERVICE_USERS_GROUPS') ||  (App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
+        if(App.isAuthorized('SERVICE.SET_SERVICE_USERS_GROUPS') ||  (App.get('upgradeInProgress') || App.get('upgradeHolding'))) {
           categories.push({
             name: 'adminServiceAccounts',
             url: 'serviceAccounts',

http://git-wip-us.apache.org/repos/asf/ambari/blob/6b6ce800/ambari-web/test/app_test.js
----------------------------------------------------------------------
diff --git a/ambari-web/test/app_test.js b/ambari-web/test/app_test.js
index 445fe94..42789d5 100644
--- a/ambari-web/test/app_test.js
+++ b/ambari-web/test/app_test.js
@@ -20,7 +20,7 @@ var App = require('app');
 require('views/common/quick_view_link_view');
 require('models/host_component');
 require('models/stack_service_component');
-App.auth = ["AMBARI.ADD_DELETE_CLUSTERS", "AMBARI.ASSIGN_ROLES", "AMBARI.EDIT_STACK_REPOS", "AMBARI.MANAGE_GROUPS", "AMBARI.MANAGE_STACK_VERSIONS", "AMBARI.MANAGE_USERS", "AMBARI.MANAGE_VIEWS", "AMBARI.RENAME_CLUSTER", "AMBARI.SET_SERVICE_USERS_GROUPS", "CLUSTER.TOGGLE_ALERTS", "CLUSTER.TOGGLE_KERBEROS", "CLUSTER.UPGRADE_DOWNGRADE_STACK", "CLUSTER.VIEW_ALERTS", "CLUSTER.VIEW_CONFIGS", "CLUSTER.VIEW_METRICS", "CLUSTER.VIEW_STACK_DETAILS", "CLUSTER.VIEW_STATUS_INFO", "HOST.ADD_DELETE_COMPONENTS", "HOST.ADD_DELETE_HOSTS", "HOST.TOGGLE_MAINTENANCE", "HOST.VIEW_CONFIGS", "HOST.VIEW_METRICS", "HOST.VIEW_STATUS_INFO", "SERVICE.ADD_DELETE_SERVICES", "SERVICE.COMPARE_CONFIGS", "SERVICE.DECOMMISSION_RECOMMISSION", "SERVICE.ENABLE_HA", "SERVICE.MANAGE_CONFIG_GROUPS", "SERVICE.MODIFY_CONFIGS", "SERVICE.MOVE", "SERVICE.RUN_CUSTOM_COMMAND", "SERVICE.RUN_SERVICE_CHECK", "SERVICE.START_STOP", "SERVICE.TOGGLE_ALERTS", "SERVICE.TOGGLE_MAINTENANCE", "SERVICE.VIEW_ALERTS", "SERVICE.VIEW_CONFIGS", "SERV
 ICE.VIEW_METRICS", "SERVICE.VIEW_STATUS_INFO", "VIEW.USE"];
+App.auth = ["AMBARI.ADD_DELETE_CLUSTERS", "AMBARI.ASSIGN_ROLES", "AMBARI.EDIT_STACK_REPOS", "AMBARI.MANAGE_GROUPS", "AMBARI.MANAGE_STACK_VERSIONS", "AMBARI.MANAGE_USERS", "AMBARI.MANAGE_VIEWS", "AMBARI.RENAME_CLUSTER", "SERVICE.SET_SERVICE_USERS_GROUPS", "CLUSTER.TOGGLE_ALERTS", "CLUSTER.TOGGLE_KERBEROS", "CLUSTER.UPGRADE_DOWNGRADE_STACK", "CLUSTER.VIEW_ALERTS", "CLUSTER.VIEW_CONFIGS", "CLUSTER.VIEW_METRICS", "CLUSTER.VIEW_STACK_DETAILS", "CLUSTER.VIEW_STATUS_INFO", "HOST.ADD_DELETE_COMPONENTS", "HOST.ADD_DELETE_HOSTS", "HOST.TOGGLE_MAINTENANCE", "HOST.VIEW_CONFIGS", "HOST.VIEW_METRICS", "HOST.VIEW_STATUS_INFO", "SERVICE.ADD_DELETE_SERVICES", "SERVICE.COMPARE_CONFIGS", "SERVICE.DECOMMISSION_RECOMMISSION", "SERVICE.ENABLE_HA", "SERVICE.MANAGE_CONFIG_GROUPS", "SERVICE.MODIFY_CONFIGS", "SERVICE.MOVE", "SERVICE.RUN_CUSTOM_COMMAND", "SERVICE.RUN_SERVICE_CHECK", "SERVICE.START_STOP", "SERVICE.TOGGLE_ALERTS", "SERVICE.TOGGLE_MAINTENANCE", "SERVICE.VIEW_ALERTS", "SERVICE.VIEW_CONFIGS", "SER
 VICE.VIEW_METRICS", "SERVICE.VIEW_STATUS_INFO", "VIEW.USE"];
 
 describe('App', function () {
 


[09/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
deleted file mode 100644
index f3349c6..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/OracleQuerySetAmbariDB.java
+++ /dev/null
@@ -1,44 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
-
-/**
- *
- * Overriding methods for Oracle specific queries
- */
-
-public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + "";
-  }
-  @Override
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?";
-  }
-  @Override
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')";
-  }
-  @Override
-  protected String getRevSql(int id,String maxcount){
-    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "'";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java
deleted file mode 100644
index 5f4356b..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/PostgressQuerySetAmbariDB.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
-
-
-public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
deleted file mode 100644
index 5be8cc1..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/QuerySetAmbariDB.java
+++ /dev/null
@@ -1,79 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-/**
- * History Query Prepared statemets
- */
-
-public abstract class QuerySetAmbariDB {
-
-  public PreparedStatement getTableIdFromInstanceName(Connection connection, String instance) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceName());
-    prSt.setString(1, instance);
-    return prSt;
-  }
-
-  public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
-
-    return prSt;
-  }
-
-  public PreparedStatement insertToHiveHistory(Connection connection, int id, String maxcount, long epochtime, String dirname) throws SQLException {
-
-    String Logfile=  dirname + "logs";
-    String queryHqlFile= dirname + "query.hql";
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlInsertHiveHistory(id));
-
-    prSt.setString(1, maxcount);
-    prSt.setLong(2, epochtime);
-    prSt.setString(3, Logfile);
-    prSt.setString(4, queryHqlFile);
-    prSt.setString(5, dirname);
-
-    return prSt;
-  }
-
-  public String RevertSql(int id,String maxcount) throws SQLException {
-    return getRevSql(id,maxcount);
-  }
-
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";";
-  }
-
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
-  }
-
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
-  }
-
-  protected String getRevSql(int id,String maxcount){
-    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "';";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/MysqlQuerySetAmbariDB.java
deleted file mode 100644
index b3a0323..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/MysqlQuerySetAmbariDB.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
-
-
-public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/OracleQuerySetAmbariDB.java
deleted file mode 100644
index bc41bf7..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/OracleQuerySetAmbariDB.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
-
-
-public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getHiveInstanceSql(){
-    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}'";
-  }
-  @Override
-  protected String getAllInstanceDetailSql(){
-    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}' or view_name='PIG{1.0.0}';";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/PostgressQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/PostgressQuerySetAmbariDB.java
deleted file mode 100644
index 3297f32..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/PostgressQuerySetAmbariDB.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
-
-
-public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
deleted file mode 100644
index da1c411..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/instancedetail/QuerySetAmbariDB.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.instancedetail;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-/**
- * Service class to get instance detail
- */
-
-public abstract class QuerySetAmbariDB {
-
-  public PreparedStatement getHiveInstanceDeatil(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(getHiveInstanceSql());
-    return prSt;
-  }
-
-  public PreparedStatement getAllInstanceDeatil(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(getAllInstanceDetailSql());
-    return prSt;
-  }
-
-  protected String getHiveInstanceSql(){
-    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}';";
-  }
-
-  protected String getAllInstanceDetailSql(){
-    return "select distinct(view_instance_name) as instancename from viewentity where view_name='HIVE{1.0.0}' or view_name='PIG{1.0.0}';";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
deleted file mode 100644
index 2b40491..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/MysqlQuerySetAmbariDB.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
-
-/**
- *  override methods specific to Mysql
- */
-
-public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getSqlMaxDSidFromTableIdSavedQuery(int id) {
-    return "select max( cast(ds_id as unsigned) ) as max from DS_SAVEDQUERY_" + id + ";";
-  }
-
-  @Override
-  protected String getTableIdSqlFromInstanceNameSavedQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?;";
-  }
-
-  @Override
-  protected String getSqlMaxDSidFromTableIdHistoryQuery(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from DS_JOBIMPL_" + id + ";";
-  }
-
-  @Override
-  protected String getTableIdSqlFromInstanceNameHistoryQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
-  }
-
-  @Override
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
-  }
-
-  @Override
-  protected String getSqlInsertSavedQuery(int id) {
-    return "INSERT INTO DS_SAVEDQUERY_" + id + " values (?,?,'" + "admin" + "',?,?,?);";
-  }
-
-  @Override
-  protected String getRevSqlSavedQuery(int id, String maxcount) {
-    return "delete from  DS_SAVEDQUERY_" + id + " where ds_id='" + maxcount + "';";
-  }
-
-  @Override
-  protected String getRevSqlHistoryQuery(int id, String maxcount) {
-    return "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount + "';";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
deleted file mode 100644
index 11f9170..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/OracleQuerySetAmbariDB.java
+++ /dev/null
@@ -1,58 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
-
-public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getSqlMaxDSidFromTableIdSavedQuery(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + "";
-  }
-
-  @Override
-  protected String getTableIdSqlFromInstanceNameSavedQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?";
-  }
-
-  @Override
-  protected String getSqlMaxDSidFromTableIdHistoryQuery(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + "";
-  }
-  @Override
-  protected String getTableIdSqlFromInstanceNameHistoryQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?";
-  }
-
-  @Override
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet')";
-  }
-  @Override
-  protected String getSqlInsertSavedQuery(int id) {
-    return "INSERT INTO ds_savedquery_" + id + " values (?,?,'" + "admin" + "',?,?,?)";
-  }
-
-  @Override
-  protected String getRevSqlSavedQuery(int id, String maxcount) {
-    return "delete from  ds_savedquery_" + id + " where ds_id='" + maxcount + "'";
-  }
-  @Override
-  protected String getRevSqlHistoryQuery(int id, String maxcount) {
-    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "'";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/PostgressQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/PostgressQuerySetAmbariDB.java
deleted file mode 100644
index 7c8eeab..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/PostgressQuerySetAmbariDB.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
-
-
-public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
deleted file mode 100644
index 8ab13b2..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/savedqueryset/QuerySetAmbariDB.java
+++ /dev/null
@@ -1,131 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-/**
- * Prepared statement for Saved query
- */
-
-public abstract class QuerySetAmbariDB {
-
-  public PreparedStatement getTableIdFromInstanceNameSavedquery(Connection connection, String instance) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceNameSavedQuery());
-    prSt.setString(1, instance);
-    return prSt;
-  }
-
-  public PreparedStatement getTableIdFromInstanceNameHistoryquery(Connection connection, String instance) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceNameHistoryQuery());
-    prSt.setString(1, instance);
-    return prSt;
-  }
-
-  public PreparedStatement getMaxDsIdFromTableIdHistoryquery(Connection connection, int id) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableIdHistoryQuery(id));
-    return prSt;
-  }
-
-  public PreparedStatement getMaxDsIdFromTableIdSavedquery(Connection connection, int id) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableIdSavedQuery(id));
-
-    return prSt;
-  }
-
-  public PreparedStatement insertToHiveHistory(Connection connection, int id, String maxcount, long epochtime, String dirname) throws SQLException {
-
-    String Logfile = dirname + "logs";
-    String queryHqlFile = dirname + "query.hql";
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlInsertHiveHistory(id));
-
-    prSt.setString(1, maxcount);
-    prSt.setLong(2, epochtime);
-    prSt.setString(3, Logfile);
-    prSt.setString(4, queryHqlFile);
-    prSt.setString(5, dirname);
-
-    return prSt;
-  }
-
-  public PreparedStatement insertToHiveSavedQuery(Connection connection, int id, String maxcount, String database, String dirname, String query, String name) throws SQLException {
-
-    String Logfile = dirname + "logs";
-    String queryHqlFile = dirname + "query.hql";
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlInsertSavedQuery(id));
-
-    prSt.setString(1, maxcount);
-    prSt.setString(2, database);
-    prSt.setString(3, queryHqlFile);
-    prSt.setString(4, query);
-    prSt.setString(5, name);
-
-    return prSt;
-  }
-
-  public String revertSqlHistoryQuery(int id, String maxcount) throws SQLException {
-
-    return getRevSqlHistoryQuery(id, maxcount);
-  }
-
-  public String revertSqlSavedQuery(int id, String maxcount) throws SQLException {
-
-    return getRevSqlSavedQuery(id, maxcount);
-  }
-
-  protected String getSqlMaxDSidFromTableIdSavedQuery(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";";
-  }
-
-  protected String getTableIdSqlFromInstanceNameSavedQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name=?;";
-  }
-
-  protected String getSqlMaxDSidFromTableIdHistoryQuery(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";";
-  }
-
-  protected String getTableIdSqlFromInstanceNameHistoryQuery() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
-  }
-
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO ds_jobimpl_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
-  }
-
-  protected String getSqlInsertSavedQuery(int id) {
-    return "INSERT INTO ds_savedquery_" + id + " values (?,?,'" + "admin" + "',?,?,?);";
-  }
-
-  protected String getRevSqlSavedQuery(int id, String maxcount) {
-    return "delete from  ds_savedquery_" + id + " where ds_id='" + maxcount + "';";
-  }
-
-  protected String getRevSqlHistoryQuery(int id, String maxcount) {
-    return "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount + "';";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/MysqlQuerySetAmbariDB.java
deleted file mode 100644
index 429a598..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/MysqlQuerySetAmbariDB.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
-
-
-public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/OracleQuerySetAmbariDB.java
deleted file mode 100644
index e8b4265..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/OracleQuerySetAmbariDB.java
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
-
-/**
- *  override method specific to Query.
- */
-public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getPigInstanceSql(){
-    return  "select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}'";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/PostgressQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/PostgressQuerySetAmbariDB.java
deleted file mode 100644
index 8b7f27b..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/PostgressQuerySetAmbariDB.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
-
-
-public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
deleted file mode 100644
index 6d878fe..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/instancedetail/QuerySetAmbariDB.java
+++ /dev/null
@@ -1,39 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.instancedetail;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-/**
- * Prepared statement for  Pig Instance details
- */
-
-public abstract class QuerySetAmbariDB {
-
-  public PreparedStatement getAllPigInstance(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(getPigInstanceSql());
-    return prSt;
-  }
-
-  protected String getPigInstanceSql(){
-   return  "select distinct(view_instance_name) as instancename from viewentity where view_name='PIG{1.0.0}';";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
deleted file mode 100644
index fdc2484..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/MysqlQuerySetAmbariDB.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
-
-/**
- * Override method specific to Mysql
- */
-
-public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select max( cast(ds_id as unsigned) ) as max from DS_PIGJOB_" + id + ";";
-  }
-
-  @Override
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name=?;";
-  }
-  @Override
-  protected String getSqlinsertToPigJob(int id) {
-    return "INSERT INTO DS_PIGJOB_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?);";
-  }
-  @Override
-  protected String getRevSql(int id, String maxcount) {
-    return "delete from  DS_PIGJOB_" + id + " where ds_id='" + maxcount + "';";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
deleted file mode 100644
index 2c27409..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/OracleQuerySetAmbariDB.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
-
-/**
- * Override methods for Oracle
- */
-public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + "";
-  }
-  @Override
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name=?";
-  }
-  @Override
-  protected String getSqlinsertToPigJob(int id) {
-    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?)";
-  }
-  @Override
-  protected String getRevSql(int id, String maxcount) {
-    return "delete from  ds_pigjob_" + id + " where ds_id='" + maxcount + "'";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/PostgressQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/PostgressQuerySetAmbariDB.java
deleted file mode 100644
index 07a174c..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/PostgressQuerySetAmbariDB.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
-
-
-public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
deleted file mode 100644
index ec70e45..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/jobqueryset/QuerySetAmbariDB.java
+++ /dev/null
@@ -1,80 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-/**
- *  Pig Job prepared statement
- */
-
-public abstract class QuerySetAmbariDB {
-
-  public PreparedStatement getTableIdFromInstanceName(Connection connection, String instance) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceName());
-    prSt.setString(1, instance);
-    return prSt;
-  }
-
-  public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
-    return prSt;
-  }
-
-  public PreparedStatement insertToPigJob(String dirname, String maxcountforpigjob, long epochtime1, String title, Connection connection, int id, String status) throws SQLException {
-
-    String pigScriptFile = dirname + "script.pig";
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlinsertToPigJob(id));
-
-    prSt.setString(1, maxcountforpigjob);
-    prSt.setLong(2, epochtime1);
-    prSt.setString(3, pigScriptFile);
-    prSt.setString(4, maxcountforpigjob);
-    prSt.setString(5, status);
-    prSt.setString(6, dirname);
-    prSt.setString(7, title);
-
-    return prSt;
-  }
-
-  public String revertSql(int id, String maxcount) throws SQLException {
-    return getRevSql(id, maxcount);
-  }
-
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_pigjob_" + id + ";";
-  }
-
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.jobs.models.PigJob' and view_instance_name=?;";
-  }
-
-  protected String getSqlinsertToPigJob(int id) {
-    return "INSERT INTO ds_pigjob_" + id + " values (?,?,0,'','f','','','admin',0,?,'',?,'','',?,?,'',?);";
-  }
-
-  protected String getRevSql(int id, String maxcount) {
-    return "delete from  ds_pigjob_" + id + " where ds_id='" + maxcount + "';";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
deleted file mode 100644
index a8c1c4d..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/MysqlQuerySetAmbariDB.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
-
-/**
- * Overrides methods for MySQL-specific queries
- */
-
-public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select max( cast(ds_id as unsigned) ) as max from DS_PIGSCRIPT_" + id + ";";
-  }
-  @Override
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?;";
-  }
-  @Override
-  protected String getSqlinsertToPigScript(int id) {
-    return  "INSERT INTO DS_PIGSCRIPT_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00',0,'admin',?,'','',?);";
-  }
-  @Override
-  protected String getRevSql(int id,String maxcount){
-    return "delete from  DS_PIGSCRIPT_" + id + " where ds_id='" + maxcount + "';";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
deleted file mode 100644
index d7dfc0b..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/OracleQuerySetAmbariDB.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
-
-public class OracleQuerySetAmbariDB extends QuerySetAmbariDB {
-  /*
-  * Overriding methods for Oracle specific queries
-  * */
-
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + "";
-  }
-
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?";
-  }
-
-  protected String getSqlinsertToPigScript(int id) {
-    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f','admin',?,'','',?)";
-  }
-
-  protected String getRevSql(int id, String maxcount) {
-    return "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount + "'";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/PostgressQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/PostgressQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/PostgressQuerySetAmbariDB.java
deleted file mode 100644
index fc99751..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/PostgressQuerySetAmbariDB.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
-
-
-public class PostgressQuerySetAmbariDB extends QuerySetAmbariDB {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
deleted file mode 100644
index 4fc61fe..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/pig/savedscriptqueryset/QuerySetAmbariDB.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-/**
- * Pig Script prepared statement
- */
-public abstract class QuerySetAmbariDB {
-
-  public PreparedStatement getTableIdFromInstanceName(Connection connection, String instance) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(getTableIdSqlFromInstanceName());
-    prSt.setString(1, instance);
-    return prSt;
-  }
-
-  public PreparedStatement getMaxDsIdFromTableId(Connection connection, int id) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlMaxDSidFromTableId(id));
-    return prSt;
-  }
-
-  public PreparedStatement insertToPigScript(Connection connection, int id, String maxcount1, String dirname, String title) throws SQLException {
-
-    PreparedStatement prSt = connection.prepareStatement(getSqlinsertToPigScript(id));
-    prSt.setString(1, maxcount1);
-    prSt.setString(2, dirname);
-    prSt.setString(3, title);
-
-    return prSt;
-  }
-
-  public String revertSql(int id, String maxcount) throws SQLException {
-    return getRevSql(id, maxcount);
-  }
-
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select MAX(cast(ds_id as integer)) as max from ds_pigscript_" + id + ";";
-  }
-
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.pig.resources.scripts.models.PigScript' and view_instance_name=?;";
-  }
-
-  protected String getSqlinsertToPigScript(int id) {
-    return "INSERT INTO ds_pigscript_" + id + " values (?,'1970-01-17 20:28:55.586000 +00:00:00','f','admin',?,'','',?);";
-  }
-
-  protected String getRevSql(int id, String maxcount) {
-    return "delete from  ds_pigscript_" + id + " where ds_id='" + maxcount + "';";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/MysqlQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/MysqlQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/MysqlQuerySet.java
deleted file mode 100644
index d379f88..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/MysqlQuerySet.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
-
-
-public class MysqlQuerySet extends QuerySet {
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/OracleQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/OracleQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/OracleQuerySet.java
deleted file mode 100644
index 4afb222..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/OracleQuerySet.java
+++ /dev/null
@@ -1,61 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
-
-
-public class OracleQuerySet extends QuerySet {
-
-  @Override
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =?";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =? AND submission_date <= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?) AND submission_date <= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory where submission_date <= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory where submission_date >= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory where submission_date >= date(?) AND submission_date <= date(?)";
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/PostgressQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/PostgressQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/PostgressQuerySet.java
deleted file mode 100644
index 829582a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/PostgressQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
-
-
-public class PostgressQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/QuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/QuerySet.java
deleted file mode 100644
index 8d2aad4..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/QuerySet.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-
-public abstract class QuerySet {
-
-
-  public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
-    prSt.setString(1, username);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    prSt.setString(3, endate);
-    return prSt;
-  }
-
-  /**
-   * for all user
-   */
-  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    prSt.setString(2, endate);
-    return prSt;
-  }
-
-
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?;";
-
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =?;";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =? AND submission_date <= date(?);";
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?);";
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select query from beeswax_queryhistory where owner_id =? AND submission_date >= date(?) AND submission_date <= date(?);";
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory;";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory where submission_date <= date(?);";
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory where submission_date >= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select query from beeswax_queryhistory where submission_date >= date(?) AND submission_date <= date(?);";
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/SqliteQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/SqliteQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/SqliteQuerySet.java
deleted file mode 100644
index 417df3a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/historyqueryset/SqliteQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.historyqueryset;
-
-
-public class SqliteQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/MysqlQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/MysqlQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/MysqlQuerySet.java
deleted file mode 100644
index d91633d..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/MysqlQuerySet.java
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
-
-
-public class MysqlQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/OracleQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/OracleQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/OracleQuerySet.java
deleted file mode 100644
index 3190885..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/OracleQuerySet.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
-
-
-public class OracleQuerySet extends QuerySet {
-
-  @Override
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?";
-
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =?";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime <= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?) AND mtime <= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?) AND mtime <= date(?)";
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/PostgressQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/PostgressQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/PostgressQuerySet.java
deleted file mode 100644
index 83d32c7..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/PostgressQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
-
-
-public class PostgressQuerySet extends QuerySet {
-}


[05/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java
deleted file mode 100644
index 7e1bbf4..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeUtility.java
+++ /dev/null
@@ -1,225 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.revertchange;
-
-import java.beans.PropertyVetoException;
-import java.io.BufferedReader;
-import java.io.File;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.security.PrivilegedExceptionAction;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.sql.Statement;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.Date;
-import java.util.List;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.log4j.Logger;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-
-
-public class RevertChangeUtility  {
-
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  public boolean stringtoDatecompare(String datefromservlet,
-                                     String datefromfile) throws ParseException {
-
-    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd");
-    Date date1 = formatter.parse(datefromservlet);
-    Date date2 = formatter.parse(datefromfile);
-    if (date1.compareTo(date2) < 0) {
-      return true;
-    } else {
-      return false;
-    }
-
-  }
-
-  public void removedir(final String dir, final String namenodeuri)
-    throws IOException, URISyntaxException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class
-              .getName());
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class
-              .getName());
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.delete(src, true);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      e.printStackTrace();
-    }
-  }
-
-  public MigrationModel revertChangeUtility(String instance, String revertDate,String jobid,ViewContext view,MigrationResponse migrationresult) throws IOException, ItemNotFound {
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(RevertChangeUtility.class);
-
-    logger.info("------------------------------");
-    logger.info("Reverting the changes Start:");
-    logger.info("------------------------------");
-
-    logger.info("Revert Date " + revertDate);
-    logger.info("instance name " + instance);
-    int i = 0;
-
-    BufferedReader br = null;
-    Connection connectionAmbariDatabase = null;
-
-    try {
-      connectionAmbariDatabase = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-      connectionAmbariDatabase.setAutoCommit(false);
-
-      Statement stmt = null;
-      stmt = connectionAmbariDatabase.createStatement();
-      SAXBuilder builder = new SAXBuilder();
-      File xmlFile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
-      try {
-
-        Document document = (Document) builder.build(xmlFile);
-        Element rootNode = document.getRootElement();
-        List list = rootNode.getChildren("RevertRecord");
-        logger.info("list size is = "+list.size());
-        for (i = 0; i < list.size(); i++) {
-
-          float calc = ((float) (i + 1)) / list.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          migrationresult.setIsNoQuerySelected("yes");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i+1);
-          migrationresult.setTotalNoQuery(list.size());
-
-          getResourceManager(view).update(migrationresult, jobid);
-
-          Element node = (Element) list.get(i);
-
-          if (node.getChildText("instance").equals(instance)) {
-            logger.info("instance matched");
-
-            if (stringtoDatecompare(revertDate, node.getChildText("datetime").toString())) {
-              logger.info("date is less query is sucess");
-              String sql = node.getChildText("query");
-              logger.info(sql);
-              stmt.executeUpdate(sql);
-              removedir(node.getChildText("dirname").toString(), view.getProperties().get("namenode_URI_Ambari"));
-              logger.info(node.getChildText("dirname").toString() + " deleted");
-
-            }
-            else {
-              logger.info("date is big query is failed");
-            }
-
-          }
-
-        }
-
-        connectionAmbariDatabase.commit();
-
-
-
-
-
-
-
-
-        logger.info("------------------------------");
-        logger.info("Reverting the changes End");
-        logger.info("------------------------------");
-
-      } catch (IOException e) {
-        logger.error("IOException: ", e);
-      } catch (ParseException e) {
-        logger.error("ParseException: ", e);
-      } catch (JDOMException e) {
-        logger.error("JDOMException: ", e);
-      } catch (URISyntaxException e) {
-        logger.error("URISyntaxException:  ", e);
-      }
-    } catch (SQLException e1) {
-      logger.error("SqlException  ", e1);
-      try {
-        connectionAmbariDatabase.rollback();
-        logger.info("Rollback done");
-      } catch (SQLException e2) {
-        logger.error("SqlException in Rollback  ", e2);
-      }
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: ", e);
-    }
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-    MigrationModel model = new MigrationModel();
-//    model.setInstanceName(instance);
-//    model.setNumberofQueryTransfered(i + 1);
-//    model.setTimeTakentotransfer(String.valueOf(elapsedTime));
-
-    return model;
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoHive.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoHive.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoHive.java
new file mode 100644
index 0000000..f8d731d
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoHive.java
@@ -0,0 +1,51 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.model;
+
+public class PojoHive {
+
+  private String database;
+  private String owner;
+  private String query;
+
+  public String getDatabase() {
+    return database;
+  }
+
+  public void setDatabase(String database) {
+    this.database = database;
+  }
+
+  public String getOwner() {
+    return owner;
+  }
+
+  public void setOwner(String owner) {
+    this.owner = owner;
+  }
+
+  public String getQuery() {
+    return query;
+  }
+
+  public void setQuery(String query) {
+    this.query = query;
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoPig.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoPig.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoPig.java
new file mode 100644
index 0000000..05d0da9
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/model/PojoPig.java
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.model;
+
+import java.util.Date;
+
+public class PojoPig {
+
+  private Date dt;
+  private String script;
+  private String Status;
+  private String title;
+  private String dir;
+
+  public Date getDt() {
+    return dt;
+  }
+
+  public void setDt(Date dt) {
+    this.dt = dt;
+  }
+
+  public String getScript() {
+    return script;
+  }
+
+  public void setScript(String script) {
+    this.script = script;
+  }
+
+  public String getStatus() {
+    return Status;
+  }
+
+  public void setStatus(String status) {
+    Status = status;
+  }
+
+  public String getTitle() {
+    return title;
+  }
+
+  public void setTitle(String title) {
+    this.title = title;
+  }
+
+  public String getDir() {
+    return dir;
+  }
+
+  public void setDir(String dir) {
+    this.dir = dir;
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/DataStoreStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/DataStoreStorage.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/DataStoreStorage.java
deleted file mode 100644
index 24b7959..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/DataStoreStorage.java
+++ /dev/null
@@ -1,151 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence;
-
-import org.apache.ambari.view.PersistenceException;
-import org.apache.ambari.view.ViewContext;
-
-import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.*;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.WebApplicationException;
-import java.util.LinkedList;
-import java.util.List;
-
-/**
- * Engine for storing objects to context DataStore storage
- */
-public class DataStoreStorage implements Storage {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(DataStoreStorage.class);
-  protected ViewContext context;
-
-  /**
-   * Constructor
-   * @param context View Context instance
-   */
-  public DataStoreStorage(ViewContext context) {
-    this.context = context;
-  }
-
-  @Override
-  public void store(Indexed obj) {
-    try {
-      if (obj.getId() == null) {
-        int id = nextIdForEntity(context, obj.getClass());
-        obj.setId(String.valueOf(id));
-      }
-      context.getDataStore().store(obj);
-    } catch (PersistenceException e) {
-      throw new ServiceFormattedException("Error while saving object to DataStorage", e);
-    }
-  }
-
-  private static int nextIdForEntity(ViewContext context, Class aClass) {
-    // auto increment id implementation
-    String lastId = context.getInstanceData(aClass.getName());
-    int newId;
-    if (lastId == null) {
-      newId = 1;
-    } else {
-      newId = Integer.parseInt(lastId) + 1;
-    }
-    context.putInstanceData(aClass.getName(), String.valueOf(newId));
-    return newId;
-  }
-
-  @Override
-  public  <T extends Indexed> T load(Class<T> model, int id) throws ItemNotFound {
-    LOG.debug(String.format("Loading %s #%d", model.getName(), id));
-    try {
-      T obj = context.getDataStore().find(model, String.valueOf(id));
-      if (obj != null) {
-        return obj;
-      } else {
-        throw new ItemNotFound();
-      }
-    } catch (PersistenceException e) {
-      throw new ServiceFormattedException("Error while finding object in DataStorage", e);
-    }
-  }
-
-  @Override
-  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model, FilteringStrategy filter) {
-    LinkedList<T> list = new LinkedList<T>();
-    LOG.debug(String.format("Loading all %s-s", model.getName()));
-    try {
-      for(T item: context.getDataStore().findAll(model, null)) {
-        if ((filter == null) || filter.isConform(item)) {
-          list.add(item);
-        }
-      }
-    } catch (PersistenceException e) {
-      throw new ServiceFormattedException("Error while finding all objects in DataStorage", e);
-    }
-    return list;
-  }
-
-  @Override
-  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
-    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
-  }
-
-  @Override
-  public synchronized void delete(Class model, int id) throws ItemNotFound {
-    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
-    Object obj = load(model, id);
-    try {
-      context.getDataStore().remove(obj);
-    } catch (PersistenceException e) {
-      throw new ServiceFormattedException("Error while removing object from DataStorage", e);
-    }
-  }
-
-  @Override
-  public boolean exists(Class model, int id) {
-    try {
-      return context.getDataStore().find(model, String.valueOf(id)) != null;
-    } catch (PersistenceException e) {
-      throw new ServiceFormattedException("Error while finding object in DataStorage", e);
-    }
-  }
-
-  public static void storageSmokeTest(ViewContext context) {
-    try {
-      SmokeTestEntity entity = new SmokeTestEntity();
-      entity.setData("42");
-      DataStoreStorage storage = new DataStoreStorage(context);
-      storage.store(entity);
-
-      if (entity.getId() == null) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly (auto increment id doesn't work)", null);
-      int id = Integer.parseInt(entity.getId());
-      SmokeTestEntity entity2 = storage.load(SmokeTestEntity.class, id);
-      boolean status = entity2.getData().compareTo("42") == 0;
-      storage.delete(SmokeTestEntity.class, id);
-      if (!status) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly", null);
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/InstanceKeyValueStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/InstanceKeyValueStorage.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/InstanceKeyValueStorage.java
deleted file mode 100644
index ec57f0a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/InstanceKeyValueStorage.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.*;
-import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
-import org.apache.commons.configuration.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import javax.ws.rs.WebApplicationException;
-
-
-/**
- * Persistent storage engine for storing java beans to
- * instance data
- */
-@Deprecated
-public class InstanceKeyValueStorage extends KeyValueStorage {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(InstanceKeyValueStorage.class);
-
-  private ContextConfigurationAdapter config = null;
-  private int VALUE_LENGTH_LIMIT = 254;
-
-  /**
-   * Constructor.
-   * @param context View Context instance
-   */
-  public InstanceKeyValueStorage(ViewContext context) {
-    super(context);
-  }
-
-  /**
-   * Returns config instance, adapter to Persistence API
-   * @return config instance
-   */
-  @Override
-  protected synchronized Configuration getConfig() {
-    if (config == null) {
-      config = new ContextConfigurationAdapter(context);
-    }
-    return config;
-  }
-
-  /**
-   * Value is limited to 256 symbols, this code splits value into chunks and saves them as <key>#<chunk_id>
-   * @param modelPropName key
-   * @param json value
-   */
-  protected void write(String modelPropName, String json) {
-    int saved = 0;
-    int page = 1;
-    while (saved < json.length()) {
-      int end = Math.min(saved + VALUE_LENGTH_LIMIT, json.length());
-      String substring = json.substring(saved, end);
-      getConfig().setProperty(modelPropName + "#" + page, substring);
-      saved += VALUE_LENGTH_LIMIT;
-      page += 1;
-      LOG.debug("Chunk saved: " + modelPropName + "#" + page + "=" + substring);
-    }
-    getConfig().setProperty(modelPropName, page - 1);
-    LOG.debug("Write finished: " + modelPropName + " pages:" + (page - 1));
-  }
-
-  /**
-   * Read chunked value (keys format <key>#<chunk_id>)
-   * @param modelPropName key
-   * @return value
-   */
-  protected String read(String modelPropName) {
-    StringBuilder result = new StringBuilder();
-    int pages = getConfig().getInt(modelPropName);
-    LOG.debug("Read started: " + modelPropName + " pages:" + pages);
-
-    for(int page = 1; page <= pages; page++) {
-      String substring = getConfig().getString(modelPropName + "#" + page);
-      LOG.debug("Chunk read: " + modelPropName + "#" + page + "=" + substring);
-      if (substring != null) {
-        result.append(substring);
-      }
-    }
-
-    return result.toString();
-  }
-
-  /**
-   * Remove chunked value (keys format <key>#<chunk_id>)
-   * @param modelPropName key
-   */
-  protected void clear(String modelPropName) {
-    int pages = getConfig().getInt(modelPropName);
-    LOG.debug("Clean started: " + modelPropName + " pages:" + pages);
-
-    for(int page = 1; page <= pages; page++) {
-      getConfig().clearProperty(modelPropName + "#" + page);
-      LOG.debug("Chunk clean: " + modelPropName + "#" + page);
-    }
-    getConfig().clearProperty(modelPropName);
-  }
-
-  public static void storageSmokeTest(ViewContext context) {
-    try {
-      final String property = "test.smoke.property";
-      context.putInstanceData(property, "42");
-      boolean status = context.getInstanceData(property).equals("42");
-      context.removeInstanceData(property);
-      if (!status) throw new ServiceFormattedException("Ambari Views instance data DB doesn't work properly", null);
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/KeyValueStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/KeyValueStorage.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/KeyValueStorage.java
deleted file mode 100644
index fd07a39..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/KeyValueStorage.java
+++ /dev/null
@@ -1,162 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence;
-
-import com.google.gson.Gson;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.FilteringStrategy;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.commons.configuration.Configuration;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.ArrayList;
-import java.util.List;
-
-/**
- * Engine for storing objects to key-value storage
- */
-public abstract class KeyValueStorage implements Storage {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(KeyValueStorage.class);
-  protected final Gson gson = new Gson();
-  protected ViewContext context;
-
-  /**
-   * Constructor
-   * @param context View Context instance
-   */
-  public KeyValueStorage(ViewContext context) {
-    this.context = context;
-  }
-
-  /**
-   * Returns config instance, adapter to Persistence API
-   * @return config instance
-   */
-  protected abstract Configuration getConfig();
-
-  @Override
-  public synchronized void store(Indexed obj) {
-    String modelIndexingPropName = getIndexPropertyName(obj.getClass());
-
-    if (obj.getId() == null) {
-      int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
-      lastIndex ++;
-      getConfig().setProperty(modelIndexingPropName, lastIndex);
-      obj.setId(Integer.toString(lastIndex));
-    }
-
-    String modelPropName = getItemPropertyName(obj.getClass(), Integer.parseInt(obj.getId()));
-    String json = serialize(obj);
-    write(modelPropName, json);
-  }
-
-  @Override
-  public <T extends Indexed> T load(Class<T> model, int id) throws ItemNotFound {
-    String modelPropName = getItemPropertyName(model, id);
-    LOG.debug(String.format("Loading %s", modelPropName));
-    if (getConfig().containsKey(modelPropName)) {
-      String json = read(modelPropName);
-      LOG.debug(String.format("json: %s", json));
-      return deserialize(model, json);
-    } else {
-      throw new ItemNotFound();
-    }
-  }
-
-  /**
-   * Write json to storage
-   * @param modelPropName key
-   * @param json value
-   */
-  protected void write(String modelPropName, String json) {
-    getConfig().setProperty(modelPropName, json);
-  }
-
-  /**
-   * Read json from storage
-   * @param modelPropName key
-   * @return value
-   */
-  protected String read(String modelPropName) {
-    return getConfig().getString(modelPropName);
-  }
-
-  /**
-   * Remove line from storage
-   * @param modelPropName key
-   */
-  protected void clear(String modelPropName) {
-    getConfig().clearProperty(modelPropName);
-  }
-
-  protected String serialize(Indexed obj) {
-    return gson.toJson(obj);
-  }
-
-  protected <T extends Indexed> T deserialize(Class<T> model, String json) {
-    return gson.fromJson(json, model);
-  }
-
-  @Override
-  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model, FilteringStrategy filter) {
-    ArrayList<T> list = new ArrayList<T>();
-    String modelIndexingPropName = getIndexPropertyName(model);
-    LOG.debug(String.format("Loading all %s-s", model.getName()));
-    int lastIndex = getConfig().getInt(modelIndexingPropName, 0);
-    for(int i=1; i<=lastIndex; i++) {
-      try {
-        T item = load(model, i);
-        if ((filter == null) || filter.isConform(item)) {
-          list.add(item);
-        }
-      } catch (ItemNotFound ignored) {
-      }
-    }
-    return list;
-  }
-
-  @Override
-  public synchronized <T extends Indexed> List<T> loadAll(Class<T> model) {
-    return loadAll(model, new OnlyOwnersFilteringStrategy(this.context.getUsername()));
-  }
-
-  @Override
-  public synchronized void delete(Class model, int id) {
-    LOG.debug(String.format("Deleting %s:%d", model.getName(), id));
-    String modelPropName = getItemPropertyName(model, id);
-    clear(modelPropName);
-  }
-
-  @Override
-  public boolean exists(Class model, int id) {
-    return getConfig().containsKey(getItemPropertyName(model, id));
-  }
-
-  private String getIndexPropertyName(Class model) {
-    return String.format("%s:index", model.getName());
-  }
-
-  private String getItemPropertyName(Class model, int id) {
-    return String.format("%s.%d", model.getName(), id);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/LocalKeyValueStorage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/LocalKeyValueStorage.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/LocalKeyValueStorage.java
deleted file mode 100644
index cfbe7f7..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/LocalKeyValueStorage.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.utils.MisconfigurationFormattedException;
-import org.apache.commons.configuration.ConfigurationException;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-
-/**
- * Persistent storage engine for storing java beans to
- * properties file
- * Path to file should be in 'dataworker.storagePath' parameter
- */
-@Deprecated
-public class LocalKeyValueStorage extends KeyValueStorage {
-  private final static Logger LOG =
-      LoggerFactory.getLogger(LocalKeyValueStorage.class);
-
-  private PersistentConfiguration config = null;
-
-  /**
-   * Constructor
-   * @param context View Context instance
-   */
-  public LocalKeyValueStorage(ViewContext context) {
-    super(context);
-  }
-
-  /**
-   * Returns config instance
-   * @return config instance
-   */
-  @Override
-  protected synchronized PersistentConfiguration getConfig() {
-    if (config == null) {
-      String fileName = context.getProperties().get("dataworker.storagePath");
-      if (fileName == null) {
-        String msg = "dataworker.storagePath is not configured!";
-        LOG.error(msg);
-        throw new MisconfigurationFormattedException("dataworker.storagePath");
-      }
-      try {
-        config = new PersistentConfiguration(fileName);
-      } catch (ConfigurationException e) {
-        e.printStackTrace();
-      }
-    }
-    return config;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/PersistentConfiguration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/PersistentConfiguration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/PersistentConfiguration.java
deleted file mode 100644
index a97c5f7..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/PersistentConfiguration.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence;
-
-import org.apache.commons.configuration.ConfigurationException;
-import org.apache.commons.configuration.PropertiesConfiguration;
-import org.apache.commons.configuration.reloading.FileChangedReloadingStrategy;
-
-import java.io.File;
-
-/**
- * Configuration enables all necessary options for PropertiesConfiguration:
- * auto-save, auto-reloading, no delimiter parsing and other
- */
-@Deprecated
-public class PersistentConfiguration extends PropertiesConfiguration {
-  /**
-   * Constructor
-   * @param fileName path to data file
-   * @throws ConfigurationException
-   */
-  public PersistentConfiguration(String fileName) throws ConfigurationException {
-    super();
-
-    File config = new File(fileName);
-    setFile(config);
-    this.setAutoSave(true);
-    this.setReloadingStrategy(new FileChangedReloadingStrategy());
-    this.setDelimiterParsingDisabled(true);
-    this.setListDelimiter((char) 0);
-
-    if (config.exists()) {
-      this.load();
-    }
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/SmokeTestEntity.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/SmokeTestEntity.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/SmokeTestEntity.java
deleted file mode 100644
index 2369781..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/SmokeTestEntity.java
+++ /dev/null
@@ -1,45 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence;
-
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
-
-/**
- * DataStore entity to test whether DS interface works correctly.
- */
-public class SmokeTestEntity implements Indexed {
-  private String id = null;
-  private String data = null;
-
-  public String getId() {
-    return id;
-  }
-
-  public void setId(String id) {
-    this.id = id;
-  }
-
-  public String getData() {
-    return data;
-  }
-
-  public void setData(String data) {
-    this.data = data;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/Storage.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/Storage.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/Storage.java
deleted file mode 100644
index ccfca6b..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/Storage.java
+++ /dev/null
@@ -1,78 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence;
-
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.FilteringStrategy;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-
-import java.util.List;
-
-/**
- * Object storage interface
- */
-public interface Storage {
-  /**
-   * Persist object to DB. It should be Indexed
-   * @param obj object to save
-   */
-  void store(Indexed obj);
-
-  /**
-   * Load object
-   * @param model bean class
-   * @param id identifier
-   * @param <T> bean class
-   * @return bean instance
-   * @throws ItemNotFound thrown if item with id was not found in DB
-   */
-  <T extends Indexed> T load(Class<T> model, int id) throws ItemNotFound;
-
-  /**
-   * Load all objects of given bean class
-   * @param model bean class
-   * @param filter filtering strategy (return only those objects that conform condition)
-   * @param <T> bean class
-   * @return list of filtered objects
-   */
-  <T extends Indexed> List<T> loadAll(Class<T> model, FilteringStrategy filter);
-
-  /**
-   * Load all objects of given bean class
-   * @param model bean class
-   * @param <T> bean class
-   * @return list of all objects
-   */
-  <T extends Indexed> List<T> loadAll(Class<T> model);
-
-  /**
-   * Delete object
-   * @param model bean class
-   * @param id identifier
-   */
-  void delete(Class model, int id) throws ItemNotFound;
-
-  /**
-   * Check is object exists
-   * @param model bean class
-   * @param id identifier
-   * @return true if exists
-   */
-  boolean exists(Class model, int id);
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ContextConfigurationAdapter.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ContextConfigurationAdapter.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ContextConfigurationAdapter.java
deleted file mode 100644
index 082687e..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ContextConfigurationAdapter.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.commons.configuration.Configuration;
-
-import java.math.BigDecimal;
-import java.math.BigInteger;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Properties;
-
-/**
- * Persistence API to Apache Configuration adapter
- */
-@Deprecated
-public class ContextConfigurationAdapter implements Configuration {
-  private ViewContext context;
-
-  /**
-   * Constructor of adapter
-   * @param context View Context
-   */
-  public ContextConfigurationAdapter(ViewContext context) {
-    this.context = context;
-  }
-
-  @Override
-  public Configuration subset(String prefix) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public boolean isEmpty() {
-    return context.getInstanceData().isEmpty();
-  }
-
-  @Override
-  public boolean containsKey(String s) {
-    Map<String, String> data = context.getInstanceData();
-    return data.containsKey(s);
-  }
-
-  @Override
-  public void addProperty(String s, Object o) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public void setProperty(String s, Object o) {
-    context.putInstanceData(s, o.toString());
-  }
-
-  @Override
-  public void clearProperty(String key) {
-    context.removeInstanceData(key);
-  }
-
-  @Override
-  public void clear() {
-    for (String key : context.getInstanceData().keySet())
-      context.removeInstanceData(key);
-  }
-
-  @Override
-  public Object getProperty(String key) {
-    return context.getInstanceData(key);
-  }
-
-  @Override
-  public Iterator getKeys(String s) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public Iterator getKeys() {
-    return context.getInstanceData().keySet().iterator();
-  }
-
-  @Override
-  public Properties getProperties(String s) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public boolean getBoolean(String s) {
-    return getBoolean(s, null);
-  }
-
-  @Override
-  public boolean getBoolean(String s, boolean b) {
-    return getBoolean(s, (Boolean)b);
-  }
-
-  @Override
-  public Boolean getBoolean(String s, Boolean aBoolean) {
-    String data = context.getInstanceData(s);
-    return (data != null)?Boolean.parseBoolean(data):aBoolean;
-  }
-
-  @Override
-  public byte getByte(String s) {
-    return getByte(s, null);
-  }
-
-  @Override
-  public byte getByte(String s, byte b) {
-    return getByte(s, (Byte)b);
-  }
-
-  @Override
-  public Byte getByte(String s, Byte aByte) {
-    String data = context.getInstanceData(s);
-    return (data != null)?Byte.parseByte(data):aByte;
-  }
-
-  @Override
-  public double getDouble(String s) {
-    return getDouble(s, null);
-  }
-
-  @Override
-  public double getDouble(String s, double v) {
-    return getDouble(s, (Double)v);
-  }
-
-  @Override
-  public Double getDouble(String s, Double aDouble) {
-    String data = context.getInstanceData(s);
-    return (data != null)?Double.parseDouble(data):aDouble;
-  }
-
-  @Override
-  public float getFloat(String s) {
-    return getFloat(s, null);
-  }
-
-  @Override
-  public float getFloat(String s, float v) {
-    return getFloat(s, (Float)v);
-  }
-
-  @Override
-  public Float getFloat(String s, Float aFloat) {
-    String data = context.getInstanceData(s);
-    return (data != null)?Float.parseFloat(data):aFloat;
-  }
-
-  @Override
-  public int getInt(String s) {
-    return getInteger(s, null);
-  }
-
-  @Override
-  public int getInt(String s, int i) {
-    return getInteger(s, i);
-  }
-
-  @Override
-  public Integer getInteger(String s, Integer integer) {
-    String data = context.getInstanceData(s);
-    return (data != null)?Integer.parseInt(data):integer;
-  }
-
-  @Override
-  public long getLong(String s) {
-    return getLong(s, null);
-  }
-
-  @Override
-  public long getLong(String s, long l) {
-    return getLong(s, (Long)l);
-  }
-
-  @Override
-  public Long getLong(String s, Long aLong) {
-    String data = context.getInstanceData(s);
-    return (data != null)?Long.parseLong(data):aLong;
-  }
-
-  @Override
-  public short getShort(String s) {
-    return getShort(s, null);
-  }
-
-  @Override
-  public short getShort(String s, short i) {
-    return getShort(s, (Short)i);
-  }
-
-  @Override
-  public Short getShort(String s, Short aShort) {
-    String data = context.getInstanceData(s);
-    return (data != null)?Short.parseShort(data):aShort;
-  }
-
-  @Override
-  public BigDecimal getBigDecimal(String s) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public BigDecimal getBigDecimal(String s, BigDecimal bigDecimal) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public BigInteger getBigInteger(String s) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public BigInteger getBigInteger(String s, BigInteger bigInteger) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public String getString(String s) {
-    return context.getInstanceData(s);
-  }
-
-  @Override
-  public String getString(String s, String s2) {
-    String data = getString(s);
-    return (data != null)?data:s2;
-  }
-
-  @Override
-  public String[] getStringArray(String s) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public List getList(String s) {
-    throw new UnsupportedOperationException();
-  }
-
-  @Override
-  public List getList(String s, List list) {
-    throw new UnsupportedOperationException();
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/FilteringStrategy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/FilteringStrategy.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/FilteringStrategy.java
deleted file mode 100644
index 671aad9..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/FilteringStrategy.java
+++ /dev/null
@@ -1,31 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-/**
- * Filtering strategy for stored objects
- */
-public interface FilteringStrategy {
-  /**
-   * Check whether item conforms chosen filter or not
-   * @param item item to check
-   * @return true if item conforms this filter
-   */
-  boolean isConform(Indexed item);
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Indexed.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Indexed.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Indexed.java
deleted file mode 100644
index 31a3940..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Indexed.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-/**
- * Interface to represent item with identifier
- */
-public interface Indexed {
-  /**
-   * Get the ID
-   * @return ID
-   */
-  String getId();
-
-  /**
-   * Set ID
-   * @param id ID
-   */
-  void setId(String id);
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ItemNotFound.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ItemNotFound.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ItemNotFound.java
deleted file mode 100644
index c6e9c6f..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/ItemNotFound.java
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-/**
- * Thrown when item was not found in DB
- */
-public class ItemNotFound extends Exception {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/OnlyOwnersFilteringStrategy.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/OnlyOwnersFilteringStrategy.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/OnlyOwnersFilteringStrategy.java
deleted file mode 100644
index 15ac1e7..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/OnlyOwnersFilteringStrategy.java
+++ /dev/null
@@ -1,33 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-public class OnlyOwnersFilteringStrategy implements FilteringStrategy {
-  private final String username;
-
-  public OnlyOwnersFilteringStrategy(String username) {
-    this.username = username;
-  }
-
-  @Override
-  public boolean isConform(Indexed item) {
-    Owned object = (Owned) item;
-    return object.getOwner().compareTo(username) == 0;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Owned.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Owned.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Owned.java
deleted file mode 100644
index b99983e..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/Owned.java
+++ /dev/null
@@ -1,36 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-/**
- * Interface to represent item with owner
- */
-public interface Owned {
-  /**
-   * Get the owner
-   * @return owner
-   */
-  String getOwner();
-
-  /**
-   * Set owner
-   * @param owner owner
-   */
-  void setOwner(String owner);
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/PersonalResource.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/PersonalResource.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/PersonalResource.java
deleted file mode 100644
index 9978659..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/PersonalResource.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-public interface PersonalResource extends Indexed, Owned {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/StorageUtil.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/StorageUtil.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/StorageUtil.java
deleted file mode 100644
index 86ebe60..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/persistence/utils/StorageUtil.java
+++ /dev/null
@@ -1,94 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.persistence.utils;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.DataStoreStorage;
-import org.apache.ambari.view.huetoambarimigration.persistence.LocalKeyValueStorage;
-import org.apache.ambari.view.huetoambarimigration.persistence.Storage;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.util.HashMap;
-import java.util.Map;
-
-/**
- * Storage factory, creates storage of Local or Persistence API type.
- * Type depends on context configuration: if "dataworker.storagePath" is set,
- * storage of Local type will be created.  Otherwise, Persistence API will be used.
- *
- * Storage is singleton.
- */
-public class StorageUtil {
-  private Storage storageInstance = null;
-
-  protected final static Logger LOG =
-      LoggerFactory.getLogger(StorageUtil.class);
-
-
-  private static Map<String, StorageUtil> viewSingletonObjects = new HashMap<String, StorageUtil>();
-  public static StorageUtil getInstance(ViewContext context) {
-    if (!viewSingletonObjects.containsKey(context.getInstanceName()))
-      viewSingletonObjects.put(context.getInstanceName(), new StorageUtil(context));
-    return viewSingletonObjects.get(context.getInstanceName());
-  }
-
-  public static void dropAllConnections() {
-    viewSingletonObjects.clear();
-  }
-
-  private ViewContext context;
-
-  /**
-   * Constructor of storage util
-   * @param context View Context instance
-   */
-  public StorageUtil(ViewContext context) {
-    this.context = context;
-  }
-
-  /**
-   * Get storage instance. If one is not created, creates instance.
-   * @return storage instance
-   */
-  public synchronized Storage getStorage() {
-    if (storageInstance == null) {
-      String fileName = context.getProperties().get("dataworker.storagePath");
-      if (fileName != null) {
-        LOG.debug("Using local storage in " + fileName + " to store data");
-        // If specified, use LocalKeyValueStorage - key-value file based storage
-        storageInstance = new LocalKeyValueStorage(context);
-      } else {
-        LOG.debug("Using Persistence API to store data");
-        // If not specified, use ambari-views Persistence API
-        storageInstance = new DataStoreStorage(context);
-      }
-    }
-    return storageInstance;
-  }
-
-  /**
-   * Set storage to use across all application.
-   * Used in unit tests.
-   * @param storage storage instance
-   */
-  public void setStorage(Storage storage) {
-    storageInstance = storage;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/CRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/CRUDResourceManager.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/CRUDResourceManager.java
deleted file mode 100644
index cc338a3..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/CRUDResourceManager.java
+++ /dev/null
@@ -1,123 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.Storage;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.FilteringStrategy;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.StorageUtil;
-
-import java.util.List;
-
-/**
- * CRUD resource manager
- * @param <T> Data type with ID
- */
-abstract public class CRUDResourceManager<T extends Indexed> {
-  private Storage storage = null;
-
-  protected final Class<T> resourceClass;
-
-  /**
-   * Constructor
-   * @param responseClass model class
-   */
-  public CRUDResourceManager(Class<T> responseClass) {
-    this.resourceClass = responseClass;
-  }
-  // CRUD operations
-
-  /**
-   * Create operation
-   * @param object object
-   * @return model object
-   */
-  public T create(T object) {
-    object.setId(null);
-    return this.save(object);
-  }
-
-  /**
-   * Read operation
-   * @param id identifier
-   * @return model object
-   * @throws ItemNotFound
-   */
-  public T read(String id) throws ItemNotFound {
-    T object = null;
-    object = getMigrationStorage().load(this.resourceClass, Integer.parseInt(id));
-    if (!checkPermissions(object))
-      throw new ItemNotFound();
-    return object;
-  }
-
-  /**
-   * Read all objects
-   * @param filteringStrategy filtering strategy
-   * @return list of filtered objects
-   */
-  public List<T> readAll(FilteringStrategy filteringStrategy) {
-    return getMigrationStorage().loadAll(this.resourceClass, filteringStrategy);
-  }
-
-  /**
-   * Update operation
-   * @param newObject new object
-   * @param id identifier of previous object
-   * @return model object
-   * @throws ItemNotFound
-   */
-  public T update(T newObject, String id) throws ItemNotFound {
-    newObject.setId(id);
-    this.save(newObject);
-    return newObject;
-  }
-
-  /**
-   * Delete operation
-   * @param resourceId object identifier
-   * @throws ItemNotFound
-   */
-  public void delete(String resourceId) throws ItemNotFound {
-    int id = Integer.parseInt(resourceId);
-    if (!getMigrationStorage().exists(this.resourceClass, id)) {
-      throw new ItemNotFound();
-    }
-    getMigrationStorage().delete(this.resourceClass, id);
-  }
-
-  // UTILS
-
-  protected T save(T object) {
-    getMigrationStorage().store(object);
-    return object;
-  }
-
-  protected Storage getMigrationStorage() {
-    if (storage == null) {
-      storage = StorageUtil.getInstance(getContext()).getStorage();
-    }
-    return storage;
-  }
-
-  protected abstract boolean checkPermissions(T object);
-  protected abstract ViewContext getContext();
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/PersonalCRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/PersonalCRUDResourceManager.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/PersonalCRUDResourceManager.java
deleted file mode 100644
index 37409f0..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/PersonalCRUDResourceManager.java
+++ /dev/null
@@ -1,105 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.PersonalResource;
-
-import java.util.concurrent.Callable;
-
-/**
- * Resource manager that returns only user owned elements from DB
- * @param <T> Data type with ID and Owner
- */
-public class PersonalCRUDResourceManager<T extends PersonalResource> extends CRUDResourceManager<T> {
-  protected ViewContext context;
-  protected boolean ignorePermissions = false;
-
-  /**
-   * Constructor
-   * @param responseClass model class
-   * @param context View Context instance
-   */
-  public PersonalCRUDResourceManager(Class<T> responseClass, ViewContext context) {
-    super(responseClass);
-    this.context = context;
-  }
-
-  @Override
-  public T update(T newObject, String id) throws ItemNotFound {
-    T object = getMigrationStorage().load(this.resourceClass, Integer.parseInt(id));
-    if (object.getOwner().compareTo(this.context.getUsername()) != 0) {
-      throw new ItemNotFound();
-    }
-
-    newObject.setOwner(this.context.getUsername());
-    return super.update(newObject, id);
-  }
-
-  @Override
-  public T save(T object) {
-    if (!ignorePermissions) {
-      // in threads permissions should be ignored,
-      // because context.getUsername doesn't work. See BUG-27093.
-      object.setOwner(this.context.getUsername());
-    }
-    return super.save(object);
-  }
-
-  @Override
-  protected boolean checkPermissions(T object) {
-    if (ignorePermissions)
-      return true;
-    return object.getOwner().compareTo(this.context.getUsername()) == 0;
-  }
-
-  @Override
-  public ViewContext getContext() {
-    return context;
-  }
-
-  /**
-   * Execute action ignoring objects owner
-   * @param actions callable to execute
-   * @return value returned from actions
-   * @throws Exception
-   */
-  public <T> T ignorePermissions(Callable<T> actions) throws Exception {
-    ignorePermissions = true;
-    T result;
-    try {
-      result = actions.call();
-    } finally {
-      ignorePermissions = false;
-    }
-    return result;
-  }
-
-  protected static String getUsername(ViewContext context) {
-    String userName = context.getProperties().get("dataworker.username");
-    if (userName == null || userName.compareTo("null") == 0 || userName.compareTo("") == 0)
-      userName = context.getUsername();
-    return userName;
-  }
-
-  protected String getUsername() {
-    return getUsername(context);
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/SharedCRUDResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/SharedCRUDResourceManager.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/SharedCRUDResourceManager.java
deleted file mode 100644
index c4b9cbd..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/SharedCRUDResourceManager.java
+++ /dev/null
@@ -1,50 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.Indexed;
-
-/**
- * Resource manager that doesn't restrict access (Allow all)
- * @param <T> Data type with ID
- */
-public class SharedCRUDResourceManager<T extends Indexed> extends CRUDResourceManager<T> {
-  protected ViewContext context;
-
-  /**
-   * Constructor
-   * @param responseClass model class
-   * @param context View Context instance
-   */
-  public SharedCRUDResourceManager(Class<T> responseClass, ViewContext context) {
-    super(responseClass);
-    this.context = context;
-  }
-
-  @Override
-  protected boolean checkPermissions(T object) {
-    return true; //everyone has permission
-  }
-
-  @Override
-  protected ViewContext getContext() {
-    return context;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceManager.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceManager.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceManager.java
deleted file mode 100644
index a7af714..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceManager.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources.scripts;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.log4j.Logger;
-
-/**
- * Object that provides CRUD operations for script objects
- */
-public class MigrationResourceManager extends PersonalCRUDResourceManager<MigrationResponse> {
-  final Logger logger = Logger.getLogger(MigrationResourceManager.class);
-
-  /**
-   * Constructor
-   * @param context View Context instance
-   */
-  public MigrationResourceManager(ViewContext context) {
-    super(MigrationResponse.class, context);
-  }
-
-  @Override
-  public MigrationResponse create(MigrationResponse object) {
-
-    super.create(object);
-
-    return object;
-  }
-
-  private void createDefaultScriptFile(MigrationResponse object) {
-    getMigrationStorage().store(object);
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceProvider.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceProvider.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceProvider.java
deleted file mode 100644
index b93cb15..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/MigrationResourceProvider.java
+++ /dev/null
@@ -1,108 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources.scripts;
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.*;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.OnlyOwnersFilteringStrategy;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.lang.reflect.InvocationTargetException;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Set;
-
-/**
- * Resource provider for Scripts
- */
-public class MigrationResourceProvider implements ResourceProvider<MigrationResponse> {
-  @Inject
-  ViewContext context;
-
-  protected MigrationResourceManager resourceManager = null;
-  final org.apache.log4j.Logger logger = org.apache.log4j.Logger.getLogger(MigrationResourceManager.class);
-
-  protected synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(context);
-    }
-    return resourceManager;
-  }
-
-  @Override
-  public MigrationResponse getResource(String resourceId, Set<String> properties) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    try {
-      return getResourceManager().read(resourceId);
-    } catch (ItemNotFound itemNotFound) {
-      throw new NoSuchResourceException(resourceId);
-    }
-  }
-
-  @Override
-  public Set<MigrationResponse> getResources(ReadRequest readRequest) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    return new HashSet<MigrationResponse>(getResourceManager().readAll(
-        new OnlyOwnersFilteringStrategy(this.context.getUsername())));
-  }
-
-  @Override
-  public void createResource(String s, Map<String, Object> stringObjectMap) throws SystemException, ResourceAlreadyExistsException, NoSuchResourceException, UnsupportedPropertyException {
-    MigrationResponse mr = null;
-    try {
-      mr = new MigrationResponse(stringObjectMap);
-    } catch (InvocationTargetException e) {
-      throw new SystemException("error on creating resource", e);
-    } catch (IllegalAccessException e) {
-      throw new SystemException("error on creating resource", e);
-    }
-    getResourceManager().create(mr);
-  }
-
-  @Override
-  public boolean updateResource(String resourceId, Map<String, Object> stringObjectMap) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    MigrationResponse script = null;
-    try {
-      script = new MigrationResponse(stringObjectMap);
-    } catch (InvocationTargetException e) {
-      throw new SystemException("error on updating resource", e);
-    } catch (IllegalAccessException e) {
-      throw new SystemException("error on updating resource", e);
-    }
-    try {
-      getResourceManager().update(script, resourceId);
-    } catch (ItemNotFound itemNotFound) {
-      throw new NoSuchResourceException(resourceId);
-    }
-    return true;
-  }
-
-  @Override
-  public boolean deleteResource(String resourceId) throws SystemException, NoSuchResourceException, UnsupportedPropertyException {
-    try {
-      getResourceManager().delete(resourceId);
-    } catch (ItemNotFound itemNotFound) {
-      throw new NoSuchResourceException(resourceId);
-    }
-    return true;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ConfigurationModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ConfigurationModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ConfigurationModel.java
deleted file mode 100644
index 4a29699..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ConfigurationModel.java
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-
-public class ConfigurationModel {
-
-
-  private int id;
-  private String configParameter;
-  private String configStatus;
-
-  public int getId() {
-    return id;
-  }
-
-  public void setId(int id) {
-    this.id = id;
-  }
-
-  public String getConfigParameter() {
-    return configParameter;
-  }
-
-  public void setConfigParameter(String configParameter) {
-    this.configParameter = configParameter;
-  }
-
-  public String getConfigStatus() {
-    return configStatus;
-  }
-
-  public void setConfigStatus(String configStatus) {
-    this.configStatus = configStatus;
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/HiveModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/HiveModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/HiveModel.java
deleted file mode 100644
index fb9496a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/HiveModel.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-public class HiveModel {
-
-  private String database;
-  private String owner;
-  private String query;
-
-  public String getDatabase() {
-    return database;
-  }
-
-  public void setDatabase(String database) {
-    this.database = database;
-  }
-
-  public String getOwner() {
-    return owner;
-  }
-
-  public void setOwner(String owner) {
-    this.owner = owner;
-  }
-
-  public String getQuery() {
-    return query;
-  }
-
-  public void setQuery(String query) {
-    this.query = query;
-  }
-
-}


[08/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java
deleted file mode 100644
index c81d51a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/QuerySet.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-
-public  abstract class QuerySet {
-
-
-  public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
-    prSt.setString(1, username);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    prSt.setString(3, endate);
-    return prSt;
-  }
-
-  /**
-   * for all user
-   */
-  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    prSt.setString(2, endate);
-    return prSt;
-  }
-
-
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?;";
-
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =?;";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime <= date(?);";
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id =? AND mtime >= date(?) AND mtime <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query';";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date(?) AND mtime <= date(?);";
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/SqliteQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/SqliteQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/SqliteQuerySet.java
deleted file mode 100644
index 3460353..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/hive/savedqueryset/SqliteQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset;
-
-
-public class SqliteQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/MysqlQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/MysqlQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/MysqlQuerySet.java
deleted file mode 100644
index a9e2ef5..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/MysqlQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
-
-
-public class MysqlQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/OracleQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/OracleQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/OracleQuerySet.java
deleted file mode 100644
index 4f9e749..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/OracleQuerySet.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
-
-
-public class OracleQuerySet extends QuerySet {
-  @Override
-  protected String fetchuserIdfromUsernameSql() {
-    return   "select id from auth_user where username=?";
-
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?  AND start_time <= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?) AND start_time <= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job ";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time <= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?)";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?) AND start_time <= date(?)";
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/PostgressQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/PostgressQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/PostgressQuerySet.java
deleted file mode 100644
index 84a66f1..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/PostgressQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
-
-
-public class PostgressQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/QuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/QuerySet.java
deleted file mode 100644
index b9bdf9a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/QuerySet.java
+++ /dev/null
@@ -1,132 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-
-public abstract class QuerySet {
-
-  public PreparedStatement getUseridfromUserName(Connection connection, String username) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
-    prSt.setString(1, username);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection, int id) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection, int id, String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection, int id, String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection, int id, String startdate, String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    prSt.setString(3, endate);
-    return prSt;
-  }
-
-  /**
-   * for all user
-   */
-  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection, String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection, String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection, String startdate, String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    prSt.setString(2, endate);
-    return prSt;
-  }
-
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?;";
-
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?;";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =?  AND start_time <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where user_id =? AND start_time >= date(?) AND start_time <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job ;";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select status,start_time,statusdir,script_title,user_id from pig_job where  start_time >= date(?) AND start_time <= date(?);";
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/SqliteQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/SqliteQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/SqliteQuerySet.java
deleted file mode 100644
index af216fe..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/jobqueryset/SqliteQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset;
-
-
-public class SqliteQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/MysqlQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/MysqlQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/MysqlQuerySet.java
deleted file mode 100644
index 6c27993..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/MysqlQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
-
-
-public class MysqlQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/OracleQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/OracleQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/OracleQuerySet.java
deleted file mode 100644
index 70e46d7..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/OracleQuerySet.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
-
-
-public class OracleQuerySet extends QuerySet {
-
-  @Override
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =?";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND  date_created <= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?) AND date_created <= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 ";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND  date_created <= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?)";
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?) AND date_created <= date(?)";
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/PostgressQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/PostgressQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/PostgressQuerySet.java
deleted file mode 100644
index 90939f1..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/PostgressQuerySet.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
-
-
-public class PostgressQuerySet extends QuerySet {
-
-  @Override
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?;";
-
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =?;";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =? AND  date_created <= date(?);";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =? AND date_created >= date(?);";
-
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' AND user_id =? AND date_created >= date(?) AND date_created <= date(?);";
-
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true' ;";
-  }
-  @Override
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true'  AND  date_created <= date(?);";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true'  AND date_created >= date(?);";
-
-  }
-  @Override
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved='true'  AND date_created >= date(?) AND date_created <= date(?);";
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/QuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/QuerySet.java
deleted file mode 100644
index eebf242..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/QuerySet.java
+++ /dev/null
@@ -1,135 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-
-public abstract class QuerySet {
-
-  public PreparedStatement getUseridfromUserName(Connection connection,String username) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchuserIdfromUsernameSql());
-    prSt.setString(1, username);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateNoEndDate(Connection connection,int id) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDate(Connection connection,int id,String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDate(Connection connection,int id,String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDate(Connection connection,int id,String startdate,String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateSql());
-    prSt.setInt(1, id);
-    prSt.setString(2, startdate);
-    prSt.setString(3, endate);
-    return prSt;
-  }
-
-  /**
-   * for all user
-   * */
-  public PreparedStatement getQueriesNoStartDateNoEndDateAllUser(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateNoEnddateYesallUserSql());
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesNoStartDateYesEndDateAllUser(Connection connection,String enddate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesNoStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, enddate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateNoEndDateAllUser(Connection connection,String startdate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateNoEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    return prSt;
-  }
-
-  public PreparedStatement getQueriesYesStartDateYesEndDateAllUser(Connection connection,String startdate,String endate) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchHueQueriesYesStartdateYesEnddateYesallUserSql());
-    prSt.setString(1, startdate);
-    prSt.setString(2, endate);
-    return prSt;
-  }
-
-
-  protected String fetchuserIdfromUsernameSql() {
-    return "select id from auth_user where username=?;";
-
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =?;";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND  date_created <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?);";
-
-
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id =? AND date_created >= date(?) AND date_created <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesNoStartdateNoEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 ;";
-  }
-
-  protected String fetchHueQueriesNoStartdateYesEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND  date_created <= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateNoEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?);";
-
-  }
-
-  protected String fetchHueQueriesYesStartdateYesEnddateYesallUserSql() {
-    return "select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1  AND date_created >= date(?) AND date_created <= date(?);";
-
-  }
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/SqliteQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/SqliteQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/SqliteQuerySet.java
deleted file mode 100644
index 2b1a283..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/pig/savedscriptqueryset/SqliteQuerySet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset;
-
-
-public class SqliteQuerySet extends QuerySet {
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/MysqlQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/MysqlQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/MysqlQuerySet.java
deleted file mode 100644
index dc4e4e4..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/MysqlQuerySet.java
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
-
-
-public class MysqlQuerySet extends QuerySet {
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/OracleQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/OracleQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/OracleQuerySet.java
deleted file mode 100644
index 04ffe70..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/OracleQuerySet.java
+++ /dev/null
@@ -1,28 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
-
-
-public class OracleQuerySet extends QuerySet {
-
-
-  @Override
-  protected String fetchUserDetailSql() {
-    return "select * from auth_user";
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/PostgressQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/PostgressQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/PostgressQuerySet.java
deleted file mode 100644
index 7277211..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/PostgressQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
-
-
-public class PostgressQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/QuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/QuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/QuerySet.java
deleted file mode 100644
index 677b119..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/QuerySet.java
+++ /dev/null
@@ -1,42 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-
-public abstract class QuerySet {
-
-
-  public PreparedStatement getUserDetails(Connection connection) throws SQLException {
-    PreparedStatement prSt = connection.prepareStatement(fetchUserDetailSql());
-    return prSt;
-  }
-
-
-
-  protected String fetchUserDetailSql() {
-    return "select * from auth_user;";
-  }
-
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/SqliteQuerySet.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/SqliteQuerySet.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/SqliteQuerySet.java
deleted file mode 100644
index 7260b6a..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/huequeryset/userdetails/SqliteQuerySet.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.userdetails;
-
-
-public class SqliteQuerySet extends QuerySet {
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobId.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobId.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobId.java
deleted file mode 100644
index 24f12bf..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobId.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration;
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.JobReturnIdModel;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-
-@Path("/returnjobids")
-
-public class CreateJobId {
-
-
-
-
-  @Inject
-  ViewContext view;
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response getIdOfMigrationObject(@QueryParam("username") String username,@QueryParam("instance") String instance,@QueryParam("startdate") String startdate,@QueryParam("enddate") String enddate,@QueryParam("jobtype") String jobtype) throws IOException, InvocationTargetException, IllegalAccessException {
-
-    System.out.println("username is "+username+ "instance is "+ instance);
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-    migrationresult.setJobtype(jobtype);
-
-    getResourceManager().create(migrationresult);
-
-    JSONObject response = new JSONObject();
-
-    JobReturnIdModel model=new JobReturnIdModel();
-
-    model.setIdforJob(migrationresult.getId());
-    model.setId(0);
-
-    response.put("returnjobid",model);
-
-    return Response.ok(response).build();
-
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobIdRevertChange.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobIdRevertChange.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobIdRevertChange.java
deleted file mode 100644
index 325e880..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/CreateJobIdRevertChange.java
+++ /dev/null
@@ -1,84 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration;
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.JobReturnIdModel;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-
-@Path("/returnjobidforrevertchanges")
-
-public class CreateJobIdRevertChange {
-
-
-
-
-  @Inject
-  ViewContext view;
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response getIdOfMigrationObject(@QueryParam("instance") String instance,@QueryParam("revertdate") String revertdate) throws IOException, InvocationTargetException, IllegalAccessException {
-
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setIntanceName(instance);
-    migrationresult.setProgressPercentage(0);
-    migrationresult.setJobtype("revertchange");
-
-    getResourceManager().create(migrationresult);
-
-    JSONObject response = new JSONObject();
-
-    JobReturnIdModel model=new JobReturnIdModel();
-
-    model.setIdforJob(migrationresult.getId());
-    model.setId(0);
-
-    response.put("returnjobidforrevertchanges",model);
-
-    return Response.ok(response).build();
-
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigration.java
deleted file mode 100644
index b1131bb..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigration.java
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration;
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.migration.hive.historyquery.HiveHistoryStartJob;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery.HiveSavedQueryStartJob;
-import org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob.PigJobStartJob;
-import org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript.PigSavedScriptStartJob;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-
-@Path("/startmigrations")
-
-public class InitiateJobMigration implements Runnable {
-
-  MigrationResponse migrationresult = new MigrationResponse();
-
-  public void run() {
-
-  }
-
-
-  @Inject
-  ViewContext view;
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-
-  public Response inititateJOb(@QueryParam("username") String username, @QueryParam("instance") String instance, @QueryParam("startdate") String startdate, @QueryParam("enddate") String enddate, @QueryParam("jobid") String jobid, @QueryParam("jobtype") String jobtype) throws IOException, InvocationTargetException, IllegalAccessException {
-
-    System.out.println("username is " + username + "instance is " + instance);
-
-    JSONObject response = new JSONObject();
-
-    if (jobtype.contains("hivehistoryquerymigration")) {
-
-      new HiveHistoryStartJob(username, instance, startdate, enddate, jobid, view).start();
-    } else if (jobtype.contains("hivesavedquerymigration")) {
-
-      new HiveSavedQueryStartJob(username, instance, startdate, enddate, jobid, view).start();
-
-    } else if (jobtype.contains("pigjobmigration")) {
-
-      new PigJobStartJob(username, instance, startdate, enddate, jobid, view).start();
-
-    } else if (jobtype.contains("pigsavedscriptmigration")) {
-
-      new PigSavedScriptStartJob(username, instance, startdate, enddate, jobid, view).start();
-
-    }
-
-
-    migrationresult.setId(jobid);
-    migrationresult.setProgressPercentage(0);
-
-
-    response.put("startmigration", migrationresult);
-
-    return Response.ok(response).build();
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigrationforRevertchange.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigrationforRevertchange.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigrationforRevertchange.java
deleted file mode 100644
index d323f73..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/InitiateJobMigrationforRevertchange.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration;
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.migration.revertchange.RevertChangeStartJob;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-
-@Path("/startrevertchanges")
-
-public class InitiateJobMigrationforRevertchange implements Runnable{
-
-  MigrationResponse migrationresult=new MigrationResponse();
-
-  public void run(){
-
-  }
-
-
-  @Inject
-  ViewContext view;
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-
-  public Response RevertChangeJob(@QueryParam("instance") String instance,@QueryParam("revertdate") String revertdate,@QueryParam("jobid") String jobid) throws IOException, InvocationTargetException, IllegalAccessException {
-
-    JSONObject response = new JSONObject();
-
-    new RevertChangeStartJob(instance,revertdate,jobid, view).start();
-
-    migrationresult.setId(jobid);
-    migrationresult.setProgressPercentage(0);
-    migrationresult.setJobtype("revertchange");
-    migrationresult.setIntanceName(instance);
-
-    new RevertChangeStartJob(instance,revertdate,jobid, view).start();
-
-    response.put("startrevertchanges",migrationresult);
-
-    return Response.ok(response).build();
-
-  }
-
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AllInstanceDetailsAmbari.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AllInstanceDetailsAmbari.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AllInstanceDetailsAmbari.java
deleted file mode 100644
index d93f370..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AllInstanceDetailsAmbari.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.SQLException;
-
-/**
- * Service class for fetching
- * all the instance details
- */
-
-@Path("/allinstancedetails")
-
-public class AllInstanceDetailsAmbari {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response instancelist() throws IOException, PropertyVetoException, SQLException {
-
-    HiveInstanceDetailsUtility instance = new HiveInstanceDetailsUtility();
-
-    JSONObject response = new JSONObject();
-    response.put("allinstancedetails", instance.getAllInstancedetails(view));
-    return Response.ok(response).build();
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariDatabaseCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariDatabaseCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariDatabaseCheck.java
deleted file mode 100644
index 08d148b..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariDatabaseCheck.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-
-/**
- * Service class to check ambari database
- */
-
-@Path("/ambaridatabases")
-
-public class AmbariDatabaseCheck {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response ambariDatabase() throws IOException {
-    JSONObject response = new JSONObject();
-    try {
-      response.put("ambaridatabase", ConfigurationCheckImplementation.checkAmbariDatbaseConection(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")));
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariWebHdfsCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariWebHdfsCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariWebHdfsCheck.java
deleted file mode 100644
index 0bc7e4b..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/AmbariWebHdfsCheck.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.utils.ServiceFormattedException;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.WebApplicationException;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.net.URISyntaxException;
-
-/**
- * Service class Ambari Webhdfs Check
- */
-@Path("/ambariwebhdfsurls")
-public class AmbariWebHdfsCheck {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response ambariWebHdfs() throws IOException, URISyntaxException {
-    JSONObject response = new JSONObject();
-    try {
-      response.put("ambariwebhdfsurl", ConfigurationCheckImplementation.checkNamenodeURIConnectionforambari(view.getProperties().get("namenode_URI_Ambari")));
-      return Response.ok(response).build();
-    } catch (WebApplicationException ex) {
-      throw ex;
-    } catch (Exception ex) {
-      throw new ServiceFormattedException(ex.getMessage(), ex);
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/CheckProgresStatus.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/CheckProgresStatus.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/CheckProgresStatus.java
deleted file mode 100644
index 8d70ddb..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/CheckProgresStatus.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.QueryParam;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.io.IOException;
-import java.lang.reflect.InvocationTargetException;
-
-/**
- * Service class to check progress
- */
-
-@Path("/checkprogresses")
-
-public class CheckProgresStatus {
-
-  @Inject
-  ViewContext view;
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager() {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-
-  public Response checkProgressStatus(@QueryParam("jobid") String jobid) throws IOException, InvocationTargetException, IllegalAccessException, ItemNotFound {
-
-    MigrationResponse mr = null;
-    mr = getResourceManager().read(jobid);
-    JSONObject object = new JSONObject();
-    object.put("checkprogress", mr);
-    return Response.ok(object).build();
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/ConfigurationCheckImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/ConfigurationCheckImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/ConfigurationCheckImplementation.java
deleted file mode 100644
index e61b2ab..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/ConfigurationCheckImplementation.java
+++ /dev/null
@@ -1,134 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.ConfigurationModel;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
-import org.apache.log4j.Logger;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.net.*;
-import java.sql.Connection;
-import java.sql.SQLException;
-
-/**
- * Configuration check implementation class
- */
-
-public class ConfigurationCheckImplementation {
-
-  static final Logger logger = Logger.getLogger(ConfigurationCheckImplementation.class);
-
-  private static String homeDir = System.getProperty("java.io.tmpdir") + "/";
-
-  public static ConfigurationModel checkConfigurationForHue(String hueURL) throws IOException {
-
-    URL url = null;
-    int resonseCode = 0;
-    ConfigurationModel hueHttpUrl = new ConfigurationModel();
-    hueHttpUrl.setId(1);
-    hueHttpUrl.setConfigParameter("hueHtttpUrl");
-    url = new URL(hueURL);
-
-    HttpURLConnection connection = (HttpURLConnection) url.openConnection();
-    connection.setRequestMethod("GET");  //OR  huc.setRequestMethod ("HEAD");
-    connection.connect();
-    resonseCode = connection.getResponseCode();
-    if (resonseCode == 200) {
-      hueHttpUrl.setConfigStatus("Success");
-    } else {
-      hueHttpUrl.setConfigStatus("Failed");
-    }
-    return hueHttpUrl;
-  }
-
-  public static ConfigurationModel checkHueDatabaseConnection(String hueDBDRiver, String hueJdbcUrl, String huedbUsername, String huedbPassword) throws IOException, PropertyVetoException, SQLException {
-
-    ConfigurationModel configmodelHueDB = new ConfigurationModel();
-    configmodelHueDB.setId(4);
-    configmodelHueDB.setConfigParameter("huedb");
-    Connection con = DataSourceHueDatabase.getInstance(hueDBDRiver, hueJdbcUrl, huedbUsername, huedbPassword).getConnection();
-    configmodelHueDB.setConfigStatus("Success");
-    return configmodelHueDB;
-  }
-
-  public static ConfigurationModel checkAmbariDatbaseConection(String ambariDBDriver, String ambariDBJdbcUrl, String ambariDbUsername, String ambariDbPassword) throws IOException, PropertyVetoException, SQLException {
-
-    ConfigurationModel configmodelAmbariDB = new ConfigurationModel();
-    configmodelAmbariDB.setId(5);
-    configmodelAmbariDB.setConfigParameter("ambaridb");
-    Connection con = DataSourceAmbariDatabase.getInstance(ambariDBDriver, ambariDBJdbcUrl, ambariDbUsername, ambariDbPassword).getConnection();
-    configmodelAmbariDB.setConfigStatus("Success");
-    return configmodelAmbariDB;
-  }
-
-  public static String getHomeDir() {
-    return homeDir;
-  }
-
-  public static ConfigurationModel checkNamenodeURIConnectionforambari(String ambariServerNameNode) throws Exception {
-
-    ConfigurationModel configmodelWebhdfsAmbari = new ConfigurationModel();
-    configmodelWebhdfsAmbari.setId(6);
-    configmodelWebhdfsAmbari.setConfigParameter("ambariwebhdfsurl");
-    Configuration conf = new Configuration();
-    conf.set("fs.hdfs.impl",
-      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
-    conf.set("fs.file.impl",
-      org.apache.hadoop.fs.LocalFileSystem.class.getName()
-    );
-    FileSystem fileSystem = FileSystem.get(new URI(ambariServerNameNode), conf);
-
-    if (fileSystem instanceof WebHdfsFileSystem) {
-      configmodelWebhdfsAmbari.setConfigStatus("Success");
-    } else {
-      configmodelWebhdfsAmbari.setConfigStatus("Failed");
-      throw new Exception();
-    }
-    return configmodelWebhdfsAmbari;
-  }
-
-  public static ConfigurationModel checkNamenodeURIConnectionforHue(String hueServerNamenodeURI) throws Exception {
-
-    ConfigurationModel configmodelWebhdfsHue = new ConfigurationModel();
-    configmodelWebhdfsHue.setId(7);
-    configmodelWebhdfsHue.setConfigParameter("huewebhdfsurl");
-    Configuration conf = new Configuration();
-    conf.set("fs.hdfs.impl",
-      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-    );
-    conf.set("fs.file.impl",
-      org.apache.hadoop.fs.LocalFileSystem.class.getName()
-    );
-    FileSystem fileSystem = FileSystem.get(new URI(hueServerNamenodeURI), conf);
-
-    if (fileSystem instanceof WebHdfsFileSystem) {
-      configmodelWebhdfsHue.setConfigStatus("Success");
-    } else {
-      configmodelWebhdfsHue.setConfigStatus("Failed");
-      throw new Exception();
-    }
-    return configmodelWebhdfsHue;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsAmbari.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsAmbari.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsAmbari.java
deleted file mode 100644
index e898a6d..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/configuration/HiveInstanceDetailsAmbari.java
+++ /dev/null
@@ -1,53 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.configuration;
-
-import com.google.inject.Inject;
-import org.apache.ambari.view.ViewContext;
-import org.json.simple.JSONObject;
-import javax.ws.rs.GET;
-import javax.ws.rs.Path;
-import javax.ws.rs.Produces;
-import javax.ws.rs.core.MediaType;
-import javax.ws.rs.core.Response;
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.SQLException;
-
-/**
- * Service class to fetch hive instance details
- */
-
-@Path("/hiveinstancedetails")
-
-public class HiveInstanceDetailsAmbari {
-
-  @Inject
-  ViewContext view;
-
-  @GET
-  @Produces(MediaType.APPLICATION_JSON)
-  public Response instancelist() throws IOException, PropertyVetoException, SQLException {
-    HiveInstanceDetailsUtility instance=new HiveInstanceDetailsUtility();
-    JSONObject response = new JSONObject();
-    response.put("hiveinstancedetails",instance.getInstancedetails(view));
-    return Response.ok(response).build();
-  }
-
-}


[10/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

This reverts commit 424afb471ac76da5ca4d4cafb93b103b543b910e.


Project: http://git-wip-us.apache.org/repos/asf/ambari/repo
Commit: http://git-wip-us.apache.org/repos/asf/ambari/commit/283256c8
Tree: http://git-wip-us.apache.org/repos/asf/ambari/tree/283256c8
Diff: http://git-wip-us.apache.org/repos/asf/ambari/diff/283256c8

Branch: refs/heads/branch-2.4
Commit: 283256c83436bfccde5f8c74706e2698265a139d
Parents: 6b6ce80
Author: Alex Antonenko <hi...@gmail.com>
Authored: Wed Jun 29 17:36:27 2016 +0300
Committer: Alex Antonenko <hi...@gmail.com>
Committed: Wed Jun 29 17:36:51 2016 +0300

----------------------------------------------------------------------
 contrib/views/hueambarimigration/pom.xml        | 142 ++--
 .../configurationcheck/ConfigurationCheck.java  | 182 +++++
 .../configurationcheck/ProgressBarStatus.java   |  54 ++
 .../controller/hive/HiveHistoryMigration.java   | 222 ++++++
 .../hive/HiveSavedQueryMigration.java           | 231 ++++++
 .../controller/pig/PigJobMigration.java         | 201 +++++
 .../controller/pig/PigScriptMigration.java      | 208 +++++
 .../controller/revertchange/RevertChange.java   | 217 ++++++
 .../datasource/DataSourceAmbariDatabase.java    |   2 +
 .../datasource/DataSourceHueDatabase.java       |   2 +
 .../historyqueryset/MysqlQuerySetAmbariDB.java  |  46 --
 .../historyqueryset/OracleQuerySetAmbariDB.java |  44 --
 .../PostgressQuerySetAmbariDB.java              |  22 -
 .../hive/historyqueryset/QuerySetAmbariDB.java  |  79 --
 .../instancedetail/MysqlQuerySetAmbariDB.java   |  23 -
 .../instancedetail/OracleQuerySetAmbariDB.java  |  31 -
 .../PostgressQuerySetAmbariDB.java              |  22 -
 .../hive/instancedetail/QuerySetAmbariDB.java   |  48 --
 .../savedqueryset/MysqlQuerySetAmbariDB.java    |  65 --
 .../savedqueryset/OracleQuerySetAmbariDB.java   |  58 --
 .../PostgressQuerySetAmbariDB.java              |  22 -
 .../hive/savedqueryset/QuerySetAmbariDB.java    | 131 ----
 .../instancedetail/MysqlQuerySetAmbariDB.java   |  23 -
 .../instancedetail/OracleQuerySetAmbariDB.java  |  30 -
 .../PostgressQuerySetAmbariDB.java              |  22 -
 .../pig/instancedetail/QuerySetAmbariDB.java    |  39 -
 .../pig/jobqueryset/MysqlQuerySetAmbariDB.java  |  43 -
 .../pig/jobqueryset/OracleQuerySetAmbariDB.java |  41 -
 .../jobqueryset/PostgressQuerySetAmbariDB.java  |  22 -
 .../pig/jobqueryset/QuerySetAmbariDB.java       |  80 --
 .../MysqlQuerySetAmbariDB.java                  |  43 -
 .../OracleQuerySetAmbariDB.java                 |  41 -
 .../PostgressQuerySetAmbariDB.java              |  22 -
 .../savedscriptqueryset/QuerySetAmbariDB.java   |  70 --
 .../hive/historyqueryset/MysqlQuerySet.java     |  23 -
 .../hive/historyqueryset/OracleQuerySet.java    |  61 --
 .../hive/historyqueryset/PostgressQuerySet.java |  22 -
 .../hive/historyqueryset/QuerySet.java          | 130 ----
 .../hive/historyqueryset/SqliteQuerySet.java    |  22 -
 .../hive/savedqueryset/MysqlQuerySet.java       |  23 -
 .../hive/savedqueryset/OracleQuerySet.java      |  65 --
 .../hive/savedqueryset/PostgressQuerySet.java   |  22 -
 .../hive/savedqueryset/QuerySet.java            | 134 ----
 .../hive/savedqueryset/SqliteQuerySet.java      |  22 -
 .../pig/jobqueryset/MysqlQuerySet.java          |  22 -
 .../pig/jobqueryset/OracleQuerySet.java         |  65 --
 .../pig/jobqueryset/PostgressQuerySet.java      |  22 -
 .../huequeryset/pig/jobqueryset/QuerySet.java   | 132 ----
 .../pig/jobqueryset/SqliteQuerySet.java         |  22 -
 .../pig/savedscriptqueryset/MysqlQuerySet.java  |  22 -
 .../pig/savedscriptqueryset/OracleQuerySet.java |  60 --
 .../savedscriptqueryset/PostgressQuerySet.java  |  67 --
 .../pig/savedscriptqueryset/QuerySet.java       | 135 ----
 .../pig/savedscriptqueryset/SqliteQuerySet.java |  24 -
 .../huequeryset/userdetails/MysqlQuerySet.java  |  24 -
 .../huequeryset/userdetails/OracleQuerySet.java |  28 -
 .../userdetails/PostgressQuerySet.java          |  22 -
 .../huequeryset/userdetails/QuerySet.java       |  42 -
 .../huequeryset/userdetails/SqliteQuerySet.java |  22 -
 .../migration/CreateJobId.java                  |  85 --
 .../migration/CreateJobIdRevertChange.java      |  84 --
 .../migration/InitiateJobMigration.java         | 102 ---
 .../InitiateJobMigrationforRevertchange.java    |  85 --
 .../configuration/AllInstanceDetailsAmbari.java |  59 --
 .../configuration/AmbariDatabaseCheck.java      |  62 --
 .../configuration/AmbariWebHdfsCheck.java       |  60 --
 .../configuration/CheckProgresStatus.java       |  70 --
 .../ConfigurationCheckImplementation.java       | 134 ----
 .../HiveInstanceDetailsAmbari.java              |  53 --
 .../HiveInstanceDetailsUtility.java             | 106 ---
 .../configuration/HueDatabaseCheck.java         |  60 --
 .../configuration/HueHttpUrlCheck.java          |  60 --
 .../configuration/HueWebHdfsCheck.java          |  60 --
 .../configuration/PigInstanceDetailsAmbari.java |  61 --
 .../PigInstanceDetailsUtility.java              |  79 --
 .../migration/configuration/UserDetailHue.java  |  58 --
 .../configuration/UserDetailsUtility.java       |  80 --
 .../HiveHistoryMigrationUtility.java            | 255 ------
 ...HiveHistoryQueryMigrationImplementation.java | 551 -------------
 .../hive/historyquery/HiveHistoryStartJob.java  |  74 --
 .../HiveSavedQueryMigrationImplementation.java  | 673 ----------------
 .../HiveSavedQueryMigrationUtility.java         | 281 -------
 .../hive/savedquery/HiveSavedQueryStartJob.java |  77 --
 .../pigjob/PigJobMigrationImplementation.java   | 532 -------------
 .../pig/pigjob/PigJobMigrationUtility.java      | 238 ------
 .../migration/pig/pigjob/PigJobStartJob.java    |  68 --
 .../pig/pigscript/PigSavedScriptStartJob.java   |  70 --
 .../PigScriptMigrationImplementation.java       | 504 ------------
 .../pigscript/PigScriptMigrationUtility.java    | 229 ------
 .../revertchange/RevertChangeStartJob.java      |  70 --
 .../revertchange/RevertChangeUtility.java       | 225 ------
 .../huetoambarimigration/model/PojoHive.java    |  51 ++
 .../huetoambarimigration/model/PojoPig.java     |  72 ++
 .../persistence/DataStoreStorage.java           | 151 ----
 .../persistence/InstanceKeyValueStorage.java    | 132 ----
 .../persistence/KeyValueStorage.java            | 162 ----
 .../persistence/LocalKeyValueStorage.java       |  70 --
 .../persistence/PersistentConfiguration.java    |  52 --
 .../persistence/SmokeTestEntity.java            |  45 --
 .../persistence/Storage.java                    |  78 --
 .../utils/ContextConfigurationAdapter.java      | 260 -------
 .../persistence/utils/FilteringStrategy.java    |  31 -
 .../persistence/utils/Indexed.java              |  36 -
 .../persistence/utils/ItemNotFound.java         |  25 -
 .../utils/OnlyOwnersFilteringStrategy.java      |  33 -
 .../persistence/utils/Owned.java                |  36 -
 .../persistence/utils/PersonalResource.java     |  22 -
 .../persistence/utils/StorageUtil.java          |  94 ---
 .../resources/CRUDResourceManager.java          | 123 ---
 .../resources/PersonalCRUDResourceManager.java  | 105 ---
 .../resources/SharedCRUDResourceManager.java    |  50 --
 .../scripts/MigrationResourceManager.java       |  52 --
 .../scripts/MigrationResourceProvider.java      | 108 ---
 .../scripts/models/ConfigurationModel.java      |  52 --
 .../resources/scripts/models/HiveModel.java     |  51 --
 .../resources/scripts/models/InstanceModel.java |  41 -
 .../scripts/models/JobReturnIdModel.java        |  43 -
 .../scripts/models/MigrationModel.java          | 130 ----
 .../scripts/models/MigrationResponse.java       | 143 ----
 .../resources/scripts/models/PigModel.java      |  72 --
 .../scripts/models/ProgressCheckModel.java      |  60 --
 .../resources/scripts/models/UserModel.java     |  48 --
 .../configurationcheck/ConfFileReader.java      | 199 +++++
 .../service/hive/HiveHistoryQueryImpl.java      | 562 ++++++++++++++
 .../service/hive/HiveSavedQueryImpl.java        | 778 +++++++++++++++++++
 .../service/pig/PigJobImpl.java                 | 563 ++++++++++++++
 .../service/pig/PigScriptImpl.java              | 600 ++++++++++++++
 .../utils/BadRequestFormattedException.java     |  27 -
 .../utils/FilePaginator.java                    |  72 --
 .../MisconfigurationFormattedException.java     |  47 --
 .../utils/NotFoundFormattedException.java       |  27 -
 .../utils/ServiceFormattedException.java        | 101 ---
 .../src/main/resources/WEB-INF/web.xml          | 123 +++
 .../src/main/resources/index.jsp                | 119 +++
 .../src/main/resources/ui/.gitignore            |  33 +
 .../src/main/resources/ui/bower.json            |  15 +
 .../main/resources/ui/checkconfiguration.jsp    |  57 ++
 .../resources/ui/hivehistoryquerymigration.jsp  | 229 ++++++
 .../resources/ui/hivesavedquerymigration.jsp    | 240 ++++++
 .../src/main/resources/ui/homepage.jsp          |  31 +
 .../ui/hueambarimigration-view/.bowerrc         |   4 -
 .../ui/hueambarimigration-view/.editorconfig    |  34 -
 .../ui/hueambarimigration-view/.ember-cli       |  10 -
 .../ui/hueambarimigration-view/.gitignore       |  44 --
 .../ui/hueambarimigration-view/.jshintrc        |  32 -
 .../ui/hueambarimigration-view/.travis.yml      |  22 -
 .../ui/hueambarimigration-view/.watchmanconfig  |   3 -
 .../ui/hueambarimigration-view/README.md        |  67 --
 .../app/adapters/application.js                 |  47 --
 .../ui/hueambarimigration-view/app/app.js       |  38 -
 .../app/components/.gitkeep                     |   0
 .../app/controllers/.gitkeep                    |   0
 .../app/helpers/.gitkeep                        |   0
 .../ui/hueambarimigration-view/app/index.html   |  40 -
 .../hueambarimigration-view/app/models/.gitkeep |   0
 .../app/models/allinstancedetail.js             |  23 -
 .../app/models/ambaridatabase.js                |  24 -
 .../app/models/ambariwebhdfsurl.js              |  24 -
 .../app/models/checkprogress.js                 |  30 -
 .../app/models/getmigrationresultid.js          |  23 -
 .../app/models/hiveinstancedetail.js            |  23 -
 .../app/models/huedatabase.js                   |  24 -
 .../app/models/huehttpurl.js                    |  24 -
 .../app/models/huewebhdfsurl.js                 |  24 -
 .../app/models/piginstancedetail.js             |  23 -
 .../app/models/returnjobid.js                   |  23 -
 .../app/models/returnjobidforrevertchange.js    |  23 -
 .../app/models/startmigration.js                |  23 -
 .../app/models/startrevertchange.js             |  23 -
 .../app/models/usersdetail.js                   |  23 -
 .../ui/hueambarimigration-view/app/resolver.js  |  20 -
 .../ui/hueambarimigration-view/app/router.js    |  38 -
 .../hueambarimigration-view/app/routes/.gitkeep |   0
 .../app/routes/check-configuration.js           | 215 -----
 .../app/routes/home-page.js                     |  21 -
 .../app/routes/home-page/hive-history.js        | 106 ---
 .../app/routes/home-page/hive-saved-query.js    | 102 ---
 .../app/routes/home-page/pig-job.js             | 102 ---
 .../app/routes/home-page/pig-script.js          | 103 ---
 .../app/routes/home-page/revert-change.js       |  97 ---
 .../hueambarimigration-view/app/routes/index.js |  24 -
 .../app/serializers/application.js              |  19 -
 .../hueambarimigration-view/app/styles/app.css  |  25 -
 .../hueambarimigration-view/app/styles/app.scss |  18 -
 .../app/templates/application.hbs               |  24 -
 .../app/templates/check-configuration.hbs       | 153 ----
 .../app/templates/home-page.hbs                 |  42 -
 .../app/templates/home-page/hive-history.hbs    | 125 ---
 .../templates/home-page/hive-saved-query.hbs    | 126 ---
 .../app/templates/home-page/pig-job.hbs         | 127 ---
 .../app/templates/home-page/pig-script.hbs      | 127 ---
 .../app/templates/home-page/revert-change.hbs   |  99 ---
 .../ui/hueambarimigration-view/bower.json       |  14 -
 .../config/environment.js                       |  63 --
 .../hueambarimigration-view/ember-cli-build.js  |  61 --
 .../ui/hueambarimigration-view/package.json     |  51 --
 .../public/crossdomain.xml                      |  15 -
 .../hueambarimigration-view/public/robots.txt   |   3 -
 .../ui/hueambarimigration-view/testem.js        |  29 -
 .../ui/hueambarimigration-view/tests/.jshintrc  |  53 --
 .../tests/helpers/destroy-app.js                |  22 -
 .../tests/helpers/module-for-acceptance.js      |  40 -
 .../tests/helpers/resolver.js                   |  28 -
 .../tests/helpers/start-app.js                  |  35 -
 .../ui/hueambarimigration-view/tests/index.html |  48 --
 .../tests/test-helper.js                        |  25 -
 .../ui/hueambarimigration-view/vendor/.gitkeep  |   0
 .../src/main/resources/ui/package.json          |  27 +
 .../src/main/resources/ui/pigjobmigration.jsp   | 233 ++++++
 .../main/resources/ui/pigscriptsmigration.jsp   | 227 ++++++
 .../src/main/resources/ui/revertchange.jsp      | 203 +++++
 .../src/main/resources/view.xml                 |  98 ---
 contrib/views/pom.xml                           |   3 -
 213 files changed, 5742 insertions(+), 13164 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/pom.xml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/pom.xml b/contrib/views/hueambarimigration/pom.xml
index ea34687..f121735 100644
--- a/contrib/views/hueambarimigration/pom.xml
+++ b/contrib/views/hueambarimigration/pom.xml
@@ -16,32 +16,24 @@
 -->
 <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
          xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
-  <modelVersion>4.0.0</modelVersion>
-  <groupId>org.apache.ambari.contrib.views</groupId>
-  <artifactId>hueambarimigration</artifactId>
-  <version>2.4.0.0.0</version>
-  <name>hueambarimigration</name>
-
   <parent>
     <groupId>org.apache.ambari.contrib.views</groupId>
     <artifactId>ambari-contrib-views</artifactId>
     <version>2.4.0.0.0</version>
   </parent>
-
+  <modelVersion>4.0.0</modelVersion>
+  <artifactId>huetoambari-view</artifactId>
+  <packaging>jar</packaging>
+  <name>Hue To Ambari Migration-view</name>
+  <version>2.4.0.0.0</version>
+  <url>http://maven.apache.org</url>
   <properties>
     <ambari.dir>${project.parent.parent.parent.basedir}</ambari.dir>
   </properties>
+
+
   <dependencies>
-    <dependency>
-      <groupId>com.google.code.gson</groupId>
-      <artifactId>gson</artifactId>
-      <version>2.6.2</version>
-    </dependency>
-    <dependency>
-      <groupId>com.jayway.jsonpath</groupId>
-      <artifactId>json-path</artifactId>
-      <version>2.0.0</version>
-    </dependency>
+
     <dependency>
       <groupId>org.easymock</groupId>
       <artifactId>easymock</artifactId>
@@ -51,12 +43,14 @@
     <dependency>
       <groupId>org.apache.ambari</groupId>
       <artifactId>ambari-views</artifactId>
-      <version>[1.7.0.0,)</version>
+      <version>2.4.0.0.0</version>
     </dependency>
+
     <dependency>
       <groupId>javax.servlet</groupId>
       <artifactId>servlet-api</artifactId>
       <version>2.5</version>
+
     </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
@@ -116,41 +110,9 @@
       <groupId>c3p0</groupId>
       <artifactId>c3p0</artifactId>
       <version>0.9.1.2</version>
+
     </dependency>
-    <dependency>
-      <groupId>com.jayway.jsonpath</groupId>
-      <artifactId>json-path</artifactId>
-      <version>2.0.0</version>
-    </dependency>
-    <dependency>
-      <groupId>com.google.inject</groupId>
-      <artifactId>guice</artifactId>
-    </dependency>
-    <dependency>
-      <groupId>com.sun.jersey.contribs</groupId>
-      <artifactId>jersey-multipart</artifactId>
-      <version>1.18</version>
-    </dependency>
-    <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-client</artifactId>
-      <version>1.8</version>
-    </dependency>
-    <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-core</artifactId>
-      <version>1.18.1</version>
-    </dependency>
-    <dependency>
-      <groupId>com.sun.jersey</groupId>
-      <artifactId>jersey-json</artifactId>
-      <version>1.9</version>
-    </dependency>
-    <dependency>
-      <groupId>com.googlecode.json-simple</groupId>
-      <artifactId>json-simple</artifactId>
-      <version>1.1.1</version>
-    </dependency>
+
   </dependencies>
 
   <build>
@@ -172,7 +134,7 @@
         <configuration>
           <nodeVersion>v0.12.2</nodeVersion>
           <npmVersion>1.4.8</npmVersion>
-          <workingDirectory>${project.basedir}/src/main/resources/ui/hueambarimigration-view/</workingDirectory>
+          <workingDirectory>${project.basedir}/src/main/resources/ui</workingDirectory>
         </configuration>
         <executions>
           <execution>
@@ -189,30 +151,7 @@
               <goal>npm</goal>
             </goals>
             <configuration>
-              <arguments>install --python="${project.basedir}/../src/main/unix/ambari-python-wrap" --unsafe-perm
-              </arguments>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-
-      <plugin>
-        <artifactId>exec-maven-plugin</artifactId>
-        <groupId>org.codehaus.mojo</groupId>
-        <version>1.3.2</version>
-        <executions>
-          <execution>
-            <id>Hueambarimigration-build</id>
-            <phase>generate-sources</phase>
-            <goals>
-              <goal>exec</goal>
-            </goals>
-            <configuration>
-              <workingDirectory>${basedir}/src/main/resources/ui/hueambarimigration-view</workingDirectory>
-              <executable>node/node</executable>
-              <arguments>
-                <argument>node_modules/.bin/ember</argument>
-                <argument>build</argument>
+              <arguments>install --python="${project.basedir}/src/main/unix/ambari-python-wrap" --unsafe-perm
               </arguments>
             </configuration>
           </execution>
@@ -242,20 +181,61 @@
 
       <resource>
         <directory>src/main/resources</directory>
-        <filtering>false</filtering>
+        <filtering>true</filtering>
         <includes>
-          <include>META-INF/**/*</include>
+          <include>index.jsp</include>
+          <include>image/*.*</include>
           <include>view.xml</include>
           <include>view.log4j.properties</include>
+          <include>ui/*.*</include>
+          <include>WEB-INF/web.xml</include>
         </includes>
       </resource>
 
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/bootstrap/dist/css/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/css</targetPath>
+      </resource>
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/bootstrap/dist/fonts/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/fonts</targetPath>
+      </resource>
+
+
+      <resource>
+        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/css</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/css</targetPath>
+      </resource>
+
+      <resource>
+        <directory>src/main/resources/ui//bower_components/moment/min/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
+      </resource>
+
+
       <resource>
-        <directory>src/main/resources/ui/hueambarimigration-view/dist</directory>
+        <directory>src/main/resources/ui/bower_components/eonasdan-bootstrap-datetimepicker/build/js</directory>
         <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
       </resource>
 
       <resource>
+        <directory>src/main/resources/ui/bower_components/jquery/dist/</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
+      </resource>
+      <resource>
+        <directory>src/main/resources/ui/bower_components/bootstrap/dist/js</directory>
+        <filtering>false</filtering>
+        <targetPath>${project.build.outputDirectory}/js</targetPath>
+      </resource>
+      <resource>
         <targetPath>WEB-INF/lib</targetPath>
         <filtering>false</filtering>
         <directory>target/lib</directory>
@@ -263,4 +243,4 @@
     </resources>
   </build>
 
-</project>
\ No newline at end of file
+</project>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java
new file mode 100644
index 0000000..4c2f4ad
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ConfigurationCheck.java
@@ -0,0 +1,182 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+
+import org.apache.ambari.view.ViewContext;
+
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+
+import org.apache.ambari.view.huetoambarimigration.service.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.log4j.Logger;
+
+
+public class ConfigurationCheck extends HttpServlet {
+  private static final long serialVersionUID = 1L;
+
+  ViewContext view;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+
+    final Logger logger = Logger.getLogger(ConfigurationCheck.class);
+    response.setContentType("text/html");
+    PrintWriter out = response.getWriter();
+
+    out.println("<table class=\"table\">");
+    out.println("<thead><tr><th>Service</th><th>Status</th></tr></thead>");
+    out.println("<tbody>");
+
+    if (ConfFileReader.checkConfigurationForHue(view.getProperties().get("Hue_URL"))) {
+      logger.info("Hue URl connection:- Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Ambari" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+    } else {
+      logger.info("Hue URl connection:- Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Ambari" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+    }
+
+    if (ConfFileReader.checkConfigurationForAmbari(view.getProperties().get("Ambari_URL"))) {
+
+      logger.info("Ambari URl connection:- Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Hue" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+
+    } else {
+
+      logger.info("Ambari URl connection:- Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Hue" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+
+    }
+
+    if (ConfFileReader.checkAmbariDatbaseConection(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword"))) {
+
+      logger.info("Ambari Database connection:- Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+
+    } else {
+
+      logger.info("Ambari Database connection:- Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Ambari Database" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+
+    }
+    if (ConfFileReader.checkHueDatabaseConnection(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword"))) {
+
+      logger.info("Hue Database connection:- Success");
+      out.println("<tr class=\"success\">");
+      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
+      out.println("<td><h6>" + "OK" + "</h6></td>");
+      out.println("</tr>");
+
+    } else {
+
+      logger.info("Hue Database connection:- Failed");
+      out.println("<tr class=\"danger\">");
+      out.println("<td><h6>" + "Hue Database" + "</h6></td>");
+      out.println("<td><h6>" + "ERROR" + "</h6></td>");
+      out.println("</tr>");
+
+    }
+
+    try {
+
+      if (ConfFileReader.checkNamenodeURIConnectionforambari(view.getProperties().get("namenode_URI_Ambari"))) {
+
+        logger.info("Web hdfs Access to ambari:- Success");
+        out.println("<tr class=\"success\">");
+        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
+        out.println("<td><h6>" + "OK" + "</h6></td>");
+        out.println("</tr>");
+
+      } else {
+
+        logger.info("Web hdfs Access to ambari:- Failed");
+        out.println("<tr class=\"danger\">");
+        out.println("<td><h6>" + "namenodeURIAmbari" + "</h6></td>");
+        out.println("<td><h6>" + "ERROR" + "</h6></td>");
+        out.println("</tr>");
+
+      }
+    } catch (URISyntaxException e) {
+      logger.error("Error in accessing Webhdfs of Ambari: ", e);
+    }
+
+    try {
+      if (ConfFileReader.checkNamenodeURIConnectionforHue(view.getProperties().get("namenode_URI_Hue"))) {
+
+        logger.info("Web hdfs Access to hue:- Success");
+        out.println("<tr class=\"success\">");
+        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
+        out.println("<td><h6>" + "OK" + "</h6></td>");
+        out.println("</tr>");
+
+      } else {
+
+        logger.info("Web hdfs Access to hue:- Failed");
+        out.println("<tr class=\"danger\">");
+        out.println("<td><h6>" + "namenodeURIHue" + "</h6></td>");
+        out.println("<td><h6>" + "ERROR" + "</h6></td>");
+        out.println("</tr>");
+
+      }
+    } catch (URISyntaxException e) {
+      logger.error("Error in accessing Webhdfs of Hue: " , e);
+    }
+
+    out.println("</tbody></table>");
+
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java
new file mode 100644
index 0000000..40d63df
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/configurationcheck/ProgressBarStatus.java
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.configurationcheck;
+
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+
+public class ProgressBarStatus extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  public static String TASK_PROGRESS_VARIABLE = "task_progress_session";
+
+
+  protected void doGet(HttpServletRequest request, HttpServletResponse response) throws ServletException, IOException {
+
+    HttpSession session = request.getSession(true);
+    Integer param = (Integer) session.getAttribute(TASK_PROGRESS_VARIABLE);
+
+    if (param == null) {
+      param = 0;
+    }
+
+    response.setContentType("text/html");
+    PrintWriter out = response.getWriter();
+    out.println(param + "%");
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java
new file mode 100755
index 0000000..46be3fe
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveHistoryMigration.java
@@ -0,0 +1,222 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.hive;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.hive.HiveHistoryQueryImpl;
+
+public class HiveHistoryMigration extends HttpServlet {
+
+
+  private static final long serialVersionUID = 1031422249396784970L;
+  ViewContext view;
+
+  private String startDate;
+  private String endDate;
+  private String instance;
+  private String username;
+
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  public void doGet(HttpServletRequest req, HttpServletResponse resp)
+    throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(HiveHistoryMigration.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    /* fetching the variable from the client */
+    username = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+
+    logger.info("--------------------------------------");
+    logger.info("Hive History query Migration started");
+    logger.info("--------------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + username);
+    logger.info("hue username is : " + instance);
+
+    int maxCountOfAmbariDb, i = 0;
+    String time = null;
+    Long epochTime = null;
+    String dirNameforHiveHistroy;
+
+    HiveHistoryQueryImpl hiveHistoryQueryImpl = new HiveHistoryQueryImpl();// creating an object of the HiveHistory implementation
+
+    String[] hiveQuery = new String[1000000];
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();
+
+      hiveQuery = hiveHistoryQueryImpl.fetchFromHue(username, startDate, endDate, connectionHuedb);
+
+		   /* if No hive query selected from Hue Database according to our search criteria */
+
+      if (hiveQuery[i] == null) {
+
+        logger.info("No queries has been selected acccording to your criteria");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No queries selected according to your criteria</h4>");
+
+      } else {
+        /* If Hive queries are selected based on our search criteria */
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+        connectionAmbaridb.setAutoCommit(false);
+
+        // for each queries fetched from Hue database//
+
+        for (i = 0; hiveQuery[i] != null; i++) {
+
+          float calc = ((float) (i + 1)) / hiveQuery.length * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("_____________________");
+          logger.info("Loop No." + (i + 1));
+          logger.info("_____________________");
+          logger.info("Hue query that has been fetched" + hiveQuery[i]);
+          int id = 0;
+
+          id = hiveHistoryQueryImpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); // fetching table name according to the given instance name
+
+          logger.info("Table name has been fetched from intance name");
+
+          hiveHistoryQueryImpl.writetoFileQueryhql(hiveQuery[i], ConfFileReader.getHomeDir());// writing to .hql file to a temp file on local disk
+
+          logger.info(".hql file created in Temp directory");
+
+          hiveHistoryQueryImpl.writetoFileLogs(ConfFileReader.getHomeDir());// writing to logs file to a temp file on local disk
+
+          logger.info("Log file created in Temp directory");
+
+          maxCountOfAmbariDb = (hiveHistoryQueryImpl.fetchMaximumIdfromAmbaridb(view.getProperties().get("ambaridrivername"), connectionAmbaridb, id) + 1);// fetching the maximum count for ambari db to insert
+
+          time = hiveHistoryQueryImpl.getTime();// getting the system current time.
+
+          epochTime = hiveHistoryQueryImpl.getEpochTime();// getting system time as epoch format
+
+          dirNameforHiveHistroy = "/user/admin/hive/jobs/hive-job-" + maxCountOfAmbariDb + "-" + time + "/";// creating the directory name
+
+          logger.info("Directory name where .hql will be saved: " + dirNameforHiveHistroy);
+
+          hiveHistoryQueryImpl.insertRowinAmbaridb(view.getProperties().get("ambaridrivername"), dirNameforHiveHistroy, maxCountOfAmbariDb, epochTime, connectionAmbaridb, id, instance, i);// inserting in ambari database
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            logger.info("kerberose enabled");
+            hiveHistoryQueryImpl.createDirKerberorisedSecured(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in kerborized secured hdfs
+            logger.info("Directory created in hdfs");
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to kerborized hdfs
+            hiveHistoryQueryImpl.putFileinHdfsKerborizedSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to kerborized hdfs
+          } else {
+
+            logger.info("kerberose not enabled");
+            hiveHistoryQueryImpl.createDir(dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+            logger.info("Directory created in hdfs");
+            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the .hql file to hdfs
+            hiveHistoryQueryImpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveHistroy, view.getProperties().get("namenode_URI_Ambari"));// copying the log file to hdfs
+          }
+
+        }
+        connectionAmbaridb.commit();
+
+      }
+    } catch (SQLException e) {
+      logger.error("Sql exception in ambari database: ", e);
+      try {
+        connectionAmbaridb.rollback();
+        logger.error("Sql statement are Rolledback");
+      } catch (SQLException e1) {
+        logger.error("Sql rollback exception in ambari database",
+          e1);
+      }
+    } catch (ClassNotFoundException e) {
+      logger.error("Class not found :- " ,e);
+    } catch (ParseException e) {
+      logger.error("Parse Exception : " ,e);
+    } catch (URISyntaxException e) {
+      logger.error("URI Syntax Exception: " ,e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: " ,e);
+    } finally {
+      if (connectionAmbaridb != null) try {
+        connectionAmbaridb.close();
+      } catch (SQLException e) {
+        logger.error("Exception in closing the connection :" ,e);
+      }
+    }
+    // deleting the temporary files that are created during execution
+    hiveHistoryQueryImpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
+    hiveHistoryQueryImpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
+
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+    logger.info("------------------------------");
+    logger.info("Hive History query Migration Ends");
+    logger.info("------------------------------");
+
+    /* servlet returned to client */
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Query has been migrated to  " + instance + "</h4>");
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java
new file mode 100644
index 0000000..d873744
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/hive/HiveSavedQueryMigration.java
@@ -0,0 +1,231 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.hive;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.*;
+import javax.servlet.http.*;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.hive.HiveSavedQueryImpl;
+
+/**
+ * Servlet that migrates Hive saved queries from a Hue database into an
+ * Ambari Hive view instance. For every saved query found in Hue it writes
+ * a row into the Ambari hive-history and saved-query tables and mirrors
+ * the .hql/logs files into HDFS, reporting progress via the HTTP session.
+ */
+public class HiveSavedQueryMigration extends HttpServlet {
+
+  private static final long serialVersionUID = 1031422249396784970L;
+
+  // Ambari view context, captured once in init().
+  // NOTE(review): the request-parameter fields below are mutable servlet
+  // instance state shared across concurrent requests — racy if two
+  // migrations run at once; consider making them locals of doGet().
+  ViewContext view;
+  private String startDate;
+  private String endDate;
+  private String instance;
+  private String userName;
+
+  /** Captures the {@link ViewContext} published by the view framework. */
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+  }
+
+  /**
+   * Runs the saved-query migration for the user/date-range/instance given
+   * as request parameters and writes a short HTML summary to the response.
+   * All work happens inside one Ambari-DB transaction; any SQLException
+   * triggers a rollback of everything inserted so far.
+   */
+  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(HiveSavedQueryMigration.class);
+
+    Connection connectionAmbaridb = null;
+    Connection connectionHuedb = null;
+
+    /* search criteria supplied by the client request */
+    userName = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+
+    int i = 0;
+
+    logger.info("-------------------------------------");
+    logger.info("Hive saved query Migration started");
+    logger.info("-------------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    logger.info("instance is: " + instance);
+    logger.info("hue username is : " + userName);
+
+    HiveSavedQueryImpl hivesavedqueryimpl = new HiveSavedQueryImpl();/* creating implementation object that does the actual DB/HDFS work */
+
+    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
+    String time = null;
+    Long epochtime = null;
+    String dirNameforHiveSavedquery;
+    ArrayList<PojoHive> dbpojoHiveSavedQuery = new ArrayList<PojoHive>();
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
+
+      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(userName, startDate, endDate, connectionHuedb); /* fetching data from hue db and storing it in to a model */
+
+      if (dbpojoHiveSavedQuery.size() == 0) /* nothing in hue matched the search criteria */ {
+
+        logger.info("no Hive saved query has been selected from hue according to your criteria of searching");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No queries selected according to your criteria</h4>");
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
+        // manual transaction: commit only after every query migrated cleanly
+        connectionAmbaridb.setAutoCommit(false);
+
+        for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
+
+          logger.info("_____________________");
+          logger.info("Loop No." + (i + 1));
+          logger.info("_____________________");
+
+          // progress percentage published to the session for the UI progress bar
+          float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("query fetched from hue:-  " + dbpojoHiveSavedQuery.get(i).getQuery());
+
+          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive saved query  from the given instance name */
+
+          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance); /* fetching the instance table name for hive history query from the given instance name */
+
+          logger.info("Table name are fetched from instance name.");
+
+          hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfFileReader.getHomeDir()); /* writing hive query to a local file*/
+
+          hivesavedqueryimpl.writetoFileLogs(ConfFileReader.getHomeDir());/* writing logs to localfile */
+
+          logger.info(".hql and logs file are saved in temporary directory");
+
+          maxcountForHivehistroryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdHistoryHive) + 1);/* fetching the maximum ds_id from hive history table*/
+
+          maxCountforSavequeryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(view.getProperties().get("ambaridrivername"), connectionAmbaridb, tableIdSavedQuery) + 1);/* fetching the maximum ds_id from hive saved query table*/
+
+          time = hivesavedqueryimpl.getTime();/* getting system time */
+
+          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
+
+
+          // NOTE(review): HDFS target path is hard-coded under /user/admin —
+          // presumably migrated queries always land in the admin user's tree;
+          // confirm this is intended for non-admin target users.
+          dirNameforHiveSavedquery = "/user/admin/hive/jobs/hive-job-" + maxcountForHivehistroryAmbaridb + "-"
+            + time + "/"; // creating hdfs directory name
+
+          logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
+
+
+          hivesavedqueryimpl.insertRowHiveHistory(view.getProperties().get("ambaridrivername"), dirNameforHiveSavedquery, maxcountForHivehistroryAmbaridb, epochtime, connectionAmbaridb, tableIdHistoryHive, instance, i);// inserting to hive history table
+
+          logger.info("Row inserted in Hive History table.");
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            logger.info("Kerberose Enabled");
+            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerberized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberized cluster
+            hivesavedqueryimpl.putFileinHdfsSecured(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberized cluster
+
+          } else {
+
+            logger.info("Kerberose Not Enabled");
+            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
+            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
+            hivesavedqueryimpl.putFileinHdfs(ConfFileReader.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
+          }
+
+          //inserting into hive saved query table
+          hivesavedqueryimpl.insertRowinSavedQuery(view.getProperties().get("ambaridrivername"), maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i);
+
+        }
+        // all rows inserted without error — make the transaction durable
+        connectionAmbaridb.commit();
+
+      }
+
+
+    } catch (SQLException e) {
+
+      logger.error("SQL exception: ", e);
+      // undo every partial insert of this run
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("roll back done");
+      } catch (SQLException e1) {
+        logger.error("Rollback error: ", e1);
+
+      }
+    } catch (ClassNotFoundException e1) {
+      logger.error("Class not found : " , e1);
+    } catch (ParseException e) {
+      logger.error("ParseException: " , e);
+    } catch (URISyntaxException e) {
+      logger.error("URISyntaxException: " , e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException:" , e);
+    } finally {
+      // NOTE(review): the Hue connection is never closed here — presumably
+      // pooled by DataSourceHueDatabase; verify it is not leaked.
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("Error in connection close", e);
+        }
+    }
+
+
+    // deleting the temporary query.hql/logs files created during execution
+    hivesavedqueryimpl.deleteFileQueryhql(ConfFileReader.getHomeDir());
+    hivesavedqueryimpl.deleteFileQueryLogs(ConfFileReader.getHomeDir());
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+
+    logger.info("-------------------------------");
+    logger.info("Hive saved query Migration end");
+    logger.info("--------------------------------");
+
+    /* servlet returned to client */
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Saved query has been migrated to  " + instance + "</h4>");
+  }
+}
+
+
+
+
+
+
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java
new file mode 100644
index 0000000..4b6afdb
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigJobMigration.java
@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.pig;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.Date;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.service.*;
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.pig.PigJobImpl;
+
+/**
+ * Servlet that migrates Pig job runs from a Hue database into an Ambari
+ * Pig view instance: each matching Hue job gets a row in the Ambari DB
+ * and its script.pig/stderr/stdout files copied between the two HDFS
+ * namespaces. Progress is reported through the HTTP session.
+ */
+public class PigJobMigration extends HttpServlet {
+
+  private static final long serialVersionUID = 1031422249396784970L;
+  // Ambari view context, captured once in init().
+  // NOTE(review): these mutable instance fields are shared across requests
+  // and are racy if two migrations run concurrently; they should be locals
+  // of doGet(). Kept as fields to preserve the existing class layout.
+  ViewContext view;
+  int i = 0;
+  private String userName;
+  private String startDate;
+  private String endDate;
+  private String instance;
+
+  /** Captures the {@link ViewContext} published by the view framework. */
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  /**
+   * Runs the Pig-job migration for the user/date-range/instance supplied
+   * as request parameters, then writes a short HTML summary. All Ambari-DB
+   * inserts happen in one transaction; a SQLException rolls them back.
+   */
+  public void doGet(HttpServletRequest req, HttpServletResponse resp) throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(PigJobMigration.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    // search criteria supplied by the client request
+    userName = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+
+    logger.info("------------------------------");
+    logger.info("Pig Jobs Migration started");
+    logger.info("------------------------------");
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    // FIX: these two log lines previously printed the username under the
+    // "instance" label and vice versa (swapped variables).
+    logger.info("instance is: " + instance);
+    logger.info("hue username is : " + userName);
+
+    PigJobImpl pigjobimpl = new PigJobImpl();// implementation object that does the actual DB/HDFS work
+    int maxCountforPigScript = 0;
+
+    String time = null, timeIndorder = null;
+    Long epochtime = null;
+    String pigJobDirName;
+    ArrayList<PojoPig> pigJobDbPojo = new ArrayList<PojoPig>();
+
+    try {
+
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
+      pigJobDbPojo = pigjobimpl.fetchFromHueDB(userName, startDate, endDate, connectionHuedb);// fetching the Pig job details from hue
+
+      /* no Pig job details matched the search criteria */
+      if (pigJobDbPojo.size() == 0) {
+
+        logger.info("no Pig Job has been selected from hue according to your criteria of searching");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No Pig Job  selected according to your criteria</h4>");
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+        // manual transaction: commit only after every job migrated cleanly
+        connectionAmbaridb.setAutoCommit(false);
+
+        for (i = 0; i < pigJobDbPojo.size(); i++) {
+
+          // progress percentage published to the session for the UI progress bar
+          float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("Loop No." + (i + 1));
+          logger.info("________________");
+          logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
+
+          // resolve the Ambari table for this view instance and the next free id
+          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);
+          maxCountforPigScript = (pigjobimpl.fetchMaxIdforPigJob(view.getProperties().get("ambaridrivername"), connectionAmbaridb, fetchPigTablenameInstance) + 1);
+
+          time = pigjobimpl.getTime();
+          timeIndorder = pigjobimpl.getTimeInorder();
+          epochtime = pigjobimpl.getEpochTime();
+
+          // NOTE(review): target path is hard-coded under /user/admin —
+          // confirm this is intended for non-admin target users.
+          pigJobDirName = "/user/admin/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
+
+          pigjobimpl.insertRowPigJob(view.getProperties().get("ambaridrivername"), pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i);
+
+          // copy the job artifacts between the Hue and Ambari HDFS namespaces,
+          // using the secured variants on a Kerberized cluster
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+
+            pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+
+          } else {
+
+            pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
+
+          }
+
+          logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
+
+        }
+        // all jobs inserted without error — make the transaction durable
+        connectionAmbaridb.commit();
+      }
+
+    } catch (SQLException e) {
+      logger.error("sql exception in ambari database:", e);
+      // undo every partial insert of this run
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("roll back done");
+      } catch (SQLException e1) {
+        logger.error("roll back  exception:",e1);
+      }
+    } catch (ClassNotFoundException e2) {
+      logger.error("class not found exception:",e2);
+    } catch (ParseException e) {
+      logger.error("ParseException: " ,e);
+    } catch (URISyntaxException e) {
+      logger.error("URISyntaxException" ,e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException" ,e);
+    } finally {
+      // NOTE(review): the Hue connection is never closed here — presumably
+      // pooled by DataSourceHueDatabase; verify it is not leaked.
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("connection closing exception ", e);
+        }
+    }
+
+    logger.info("------------------------------");
+    logger.info("Pig Job Migration End");
+    logger.info("------------------------------");
+
+    // reset the progress bar and report the result to the client
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Pig jobs has been migrated to  "
+      + instance + "</h4>");
+  }
+
+}
+
+

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java
new file mode 100644
index 0000000..bb803f3
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/pig/PigScriptMigration.java
@@ -0,0 +1,208 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.ambari.view.huetoambarimigration.controller.pig;
+
+import java.beans.PropertyVetoException;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.text.ParseException;
+import java.util.ArrayList;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.ambari.view.huetoambarimigration.service.pig.PigScriptImpl;
+
+/**
+ * Servlet that migrates saved Pig scripts from a Hue database into an
+ * Ambari Pig view instance: each script is written to a temporary local
+ * file, registered in the Ambari DB, and uploaded into HDFS. Progress is
+ * reported through the HTTP session.
+ */
+public class PigScriptMigration extends HttpServlet {
+
+
+  private static final long serialVersionUID = 1031422249396784970L;
+  // Ambari view context, captured once in init().
+  // NOTE(review): these mutable instance fields are shared across requests
+  // and are racy if two migrations run concurrently; they should be locals
+  // of doGet(). Kept as fields to preserve the existing class layout.
+  ViewContext view;
+  private String startDate;
+  private String endDate;
+  private String instance;
+  private String userName;
+
+  /** Captures the {@link ViewContext} published by the view framework. */
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+
+  }
+
+  /**
+   * Runs the saved-script migration for the user/date-range/instance given
+   * as request parameters and writes a short HTML summary. All Ambari-DB
+   * inserts happen in one transaction; a SQLException rolls them back.
+   */
+  public void doGet(HttpServletRequest req, HttpServletResponse resp)
+    throws ServletException, IOException {
+
+    HttpSession session = req.getSession(true);
+    final Logger logger = Logger.getLogger(PigScriptMigration.class);
+    Connection connectionHuedb = null;
+    Connection connectionAmbaridb = null;
+
+    logger.info("-------------------------------------");
+    logger.info("Pig saved script Migration started");
+    logger.info("-------------------------------------");
+
+    // search criteria supplied by the client request
+
+    userName = req.getParameter("username");
+    startDate = req.getParameter("startdate");
+    endDate = req.getParameter("enddate");
+    instance = req.getParameter("instance");
+    int i = 0;
+
+    logger.info("start date: " + startDate);
+    logger.info("enddate date: " + endDate);
+    // FIX: these two log lines previously printed the username under the
+    // "instance" label and vice versa (swapped variables).
+    logger.info("instance is: " + instance);
+    logger.info("hue username is : " + userName);
+
+    // implementation object that does the actual DB/HDFS work
+    PigScriptImpl pigsavedscriptmigration = new PigScriptImpl();
+
+    int maxcountforsavequery = 0, maxcountforpigsavedscript;
+    String time = null, timetobeInorder = null;
+    Long epochTime = null;
+    String dirNameForPigScript, completeDirandFilePath, pigscriptFilename="";
+    int pigInstanceTableName;
+
+    ArrayList<PojoPig> dbpojoPigSavedscript = new ArrayList<PojoPig>();
+
+    try {
+      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
+      dbpojoPigSavedscript = pigsavedscriptmigration.fetchFromHueDatabase(userName, startDate, endDate, connectionHuedb, view.getProperties().get("huedrivername"));// Fetching Pig script details from Hue DB
+
+      /* no Pig script matched the search criteria */
+      if (dbpojoPigSavedscript.size() == 0) {
+
+        logger.info("no Pig script has been selected from hue according to your criteria of searching");
+        resp.setContentType("text/html");
+        PrintWriter out = resp.getWriter();
+        out.println("<br>");
+        out.println("<h4>No Pig Script selected according to your criteria</h4>");
+
+      } else {
+
+        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
+        // manual transaction: commit only after every script migrated cleanly
+        connectionAmbaridb.setAutoCommit(false);
+        logger.info("loop will continue for " + dbpojoPigSavedscript.size() + "times");
+
+        // for each pig script found in the Hue database
+
+        for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
+
+
+          // progress percentage published to the session for the UI progress bar
+          float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
+          int progressPercentage = Math.round(calc);
+
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          logger.info("Loop No." + (i + 1));
+          logger.info("________________");
+          logger.info("the title of script:  " + dbpojoPigSavedscript.get(i).getTitle());
+
+          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, instance);// finding the table name in ambari from the given instance
+
+          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(view.getProperties().get("ambaridrivername"), connectionAmbaridb, pigInstanceTableName) + 1);// next free primary key of the Pig script table
+
+          time = pigsavedscriptmigration.getTime();
+
+          timetobeInorder = pigsavedscriptmigration.getTimeInorder();
+
+          epochTime = pigsavedscriptmigration.getEpochTime();
+
+          // NOTE(review): target path is hard-coded under /user/admin —
+          // confirm this is intended for non-admin target users.
+          dirNameForPigScript = "/user/admin/pig/scripts/";
+
+          pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
+
+          completeDirandFilePath = dirNameForPigScript + pigscriptFilename;
+
+          // stage the script in a local temp file, register it in the Ambari DB,
+          // then upload it into HDFS (secured variant on a Kerberized cluster)
+          pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(), dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfFileReader.getHomeDir(), pigscriptFilename);
+
+          pigsavedscriptmigration.insertRowForPigScript(view.getProperties().get("ambaridrivername"), completeDirandFilePath, maxcountforsavequery, maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(), connectionAmbaridb, pigInstanceTableName, instance, i);
+
+          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
+            pigsavedscriptmigration.putFileinHdfsSecured(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+          } else {
+            pigsavedscriptmigration.putFileinHdfs(ConfFileReader.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
+          }
+
+          logger.info(dbpojoPigSavedscript.get(i).getTitle() + "Migrated to Ambari");
+
+          // remove the local temp copy for this script
+          pigsavedscriptmigration.deletePigScriptLocalFile(ConfFileReader.getHomeDir(), pigscriptFilename);
+
+        }
+        // all scripts inserted without error — make the transaction durable
+        connectionAmbaridb.commit();
+
+      }
+
+
+    } catch (SQLException e) {
+      logger.error("Sql exception in ambari database", e);
+      // undo every partial insert of this run
+      try {
+        connectionAmbaridb.rollback();
+        logger.info("rollback done");
+      } catch (SQLException e1) {
+        // FIX: previously logged the original exception (e) instead of the
+        // rollback failure (e1), hiding why the rollback itself failed
+        logger.error("Sql exception while doing roll back", e1);
+      }
+    } catch (ClassNotFoundException e2) {
+      logger.error("class not found exception", e2);
+    } catch (ParseException e) {
+      logger.error("ParseException: " , e);
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: " , e);
+    } finally {
+      // NOTE(review): the Hue connection is never closed here — presumably
+      // pooled by DataSourceHueDatabase; verify it is not leaked.
+      if (null != connectionAmbaridb)
+        try {
+          connectionAmbaridb.close();
+        } catch (SQLException e) {
+          logger.error("connection close exception: ", e);
+        }
+    }
+
+    // reset the progress bar and report the result to the client
+    session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+
+    resp.setContentType("text/html");
+    PrintWriter out = resp.getWriter();
+    out.println("<br>");
+    out.println("<h4>" + i + " Pig Script has been migrated to " + instance + "</h4>");
+
+    logger.info("----------------------------------");
+    logger.info("Pig saved script Migration ends");
+    logger.info("----------------------------------");
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java
new file mode 100644
index 0000000..2fff19a
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/controller/revertchange/RevertChange.java
@@ -0,0 +1,217 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.controller.revertchange;
+
+import java.beans.PropertyVetoException;
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+import java.util.List;
+
+import javax.servlet.ServletConfig;
+import javax.servlet.ServletContext;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
+import javax.servlet.http.HttpSession;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ProgressBarStatus;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+
+
+/**
+ * Servlet that reverts changes made by the Hue-to-Ambari migration.
+ * It reads revert records from {@code RevertChange.xml} (located in the
+ * view's home directory) and, for every record that belongs to the
+ * requested view instance and is newer than the requested revert date,
+ * re-executes the stored rollback SQL against the Ambari database and
+ * removes the recorded HDFS directory.
+ */
+public class RevertChange extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+  // View context fetched from the servlet context in init(); supplies the
+  // Ambari JDBC properties and the namenode URI used in doGet().
+  ViewContext view;
+
+  // Captures the ViewContext the Ambari view framework stored in the
+  // servlet context under ViewContext.CONTEXT_ATTRIBUTE.
+  @Override
+  public void init(ServletConfig config) throws ServletException {
+
+    super.init(config);
+    ServletContext context = config.getServletContext();
+    view = (ViewContext) context.getAttribute(ViewContext.CONTEXT_ATTRIBUTE);
+  }
+
+  /**
+   * Returns {@code true} when {@code datefromservlet} is strictly before
+   * {@code datefromfile} — i.e. the XML record is newer than the
+   * user-supplied revert date and therefore qualifies for revert.
+   *
+   * NOTE(review): the pattern uses "hh" (12-hour clock) but has no AM/PM
+   * field — "HH" (24-hour) was presumably intended; confirm against the
+   * datetime format the migration writes into RevertChange.xml.
+   *
+   * @param datefromservlet revert date entered by the user
+   * @param datefromfile    datetime stored in the revert-log record
+   * @throws ParseException if either string does not match the pattern
+   */
+  public boolean stringtoDatecompare(String datefromservlet,
+                                     String datefromfile) throws ParseException {
+
+    SimpleDateFormat formatter = new SimpleDateFormat("yyyy-MM-dd hh:mm:ss");
+    Date date1 = formatter.parse(datefromservlet);
+    Date date2 = formatter.parse(datefromfile);
+    if (date1.compareTo(date2) < 0) {
+      return true;
+    } else {
+      return false;
+    }
+
+  }
+
+  /**
+   * Recursively deletes {@code dir} from HDFS, acting as the remote
+   * "hdfs" user against the given namenode URI.
+   *
+   * NOTE(review): any failure is only printed via printStackTrace() and
+   * then swallowed, so callers cannot tell a delete failed; the declared
+   * IOException/URISyntaxException are consequently never thrown here.
+   *
+   * @param dir         HDFS path to delete (recursively)
+   * @param namenodeuri value used for fs.defaultFS
+   */
+  public void removedir(final String dir, final String namenodeuri)
+    throws IOException, URISyntaxException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation
+        .createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          // Pin the filesystem implementations explicitly so resolution
+          // does not depend on service discovery in the view classloader.
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class
+              .getName());
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class
+              .getName());
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.delete(src, true);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      e.printStackTrace();
+    }
+  }
+
+  /**
+   * Handles the revert request. Expects request parameters "revertdate"
+   * and "instance", walks the RevertRecord entries of RevertChange.xml,
+   * and for each record matching the instance and newer than the revert
+   * date executes its stored "query" SQL and deletes its "dirname" HDFS
+   * directory. Progress is published to the HTTP session under
+   * ProgressBarStatus.TASK_PROGRESS_VARIABLE and an HTML confirmation is
+   * written to the response on success.
+   */
+  protected void doGet(HttpServletRequest request,
+                       HttpServletResponse response) throws ServletException, IOException {
+
+    final Logger logger = Logger.getLogger(RevertChange.class);
+
+    logger.info("------------------------------");
+    logger.info("Reverting the changes Start:");
+    logger.info("------------------------------");
+
+    HttpSession session = request.getSession(true);
+    String revertDate = request.getParameter("revertdate");
+    String instance = request.getParameter("instance");
+
+    logger.info("Revert Date " + revertDate);
+    logger.info("instance name " + instance);
+
+    // NOTE(review): br is declared but never used in this method.
+    BufferedReader br = null;
+    Connection connectionAmbariDatabase = null;
+
+    try {
+      connectionAmbariDatabase = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
+      // Auto-commit is disabled so all revert statements commit atomically
+      // (or are rolled back together on SQLException).
+      connectionAmbariDatabase.setAutoCommit(false);
+
+      // NOTE(review): stmt and connectionAmbariDatabase are never closed —
+      // there is no finally block, so both leak on every request.
+      Statement stmt = null;
+      stmt = connectionAmbariDatabase.createStatement();
+      SAXBuilder builder = new SAXBuilder();
+      File xmlFile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+      try {
+
+        Document document = (Document) builder.build(xmlFile);
+        Element rootNode = document.getRootElement();
+        List list = rootNode.getChildren("RevertRecord");
+
+        for (int i = 0; i < list.size(); i++) {
+
+          // Publish progress as a rounded percentage of records processed.
+          float calc = ((float) (i + 1)) / list.size() * 100;
+          int progressPercentage = Math.round(calc);
+          session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, progressPercentage);
+
+          Element node = (Element) list.get(i);
+
+          if (node.getChildText("instance").equals(instance)) {
+
+            // Only records newer than the user-supplied date are reverted.
+            if (stringtoDatecompare(revertDate, node.getChildText("datetime").toString())) {
+
+              String sql = node.getChildText("query");
+              logger.info(sql);
+              stmt.executeUpdate(sql);
+              removedir(node.getChildText("dirname").toString(), view.getProperties().get("namenode_URI_Ambari"));
+              logger.info(node.getChildText("dirname").toString()+" deleted");
+
+            }
+
+          }
+
+        }
+
+        connectionAmbariDatabase.commit();
+
+        response.setContentType("text/html");
+        PrintWriter out = response.getWriter();
+        out.println("<br>");
+        out.println("<h4>" + " The change has been revert back for "
+          + instance + "</h4>");
+
+        session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
+
+        logger.info("------------------------------");
+        logger.info("Reverting the changes End");
+        logger.info("------------------------------");
+
+      } catch (IOException e) {
+        logger.error("IOException: ",e);
+      } catch (ParseException e) {
+        logger.error("ParseException: ",e);
+      } catch (JDOMException e) {
+        logger.error("JDOMException: ",e);
+      } catch (URISyntaxException e) {
+        logger.error("URISyntaxException:  ",e);
+      }
+    } catch (SQLException e1) {
+      logger.error("SqlException  ",e1);
+      try {
+        // NOTE(review): if getConnection() itself threw, the connection is
+        // still null here and this rollback raises NullPointerException.
+        connectionAmbariDatabase.rollback();
+        logger.info("Rollback done");
+      } catch (SQLException e2) {
+        logger.error("SqlException in Rollback  ",e2);
+      }
+    } catch (PropertyVetoException e) {
+      logger.error("PropertyVetoException: ",e);
+    }
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
index 0d0fc7d..034c4c6 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceAmbariDatabase.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.view.huetoambarimigration.datasource;
 
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
 import com.mchange.v2.c3p0.ComboPooledDataSource;
 
 import java.beans.PropertyVetoException;

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
index ce8aecd..6e40308 100644
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/DataSourceHueDatabase.java
@@ -18,6 +18,8 @@
 
 package org.apache.ambari.view.huetoambarimigration.datasource;
 
+import org.apache.ambari.view.huetoambarimigration.controller.configurationcheck.ConfigurationCheck;
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
 import com.mchange.v2.c3p0.ComboPooledDataSource;
 
 import java.beans.PropertyVetoException;

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
deleted file mode 100644
index c08f009..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/datasource/queryset/ambariqueryset/hive/historyqueryset/MysqlQuerySetAmbariDB.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.historyqueryset;
-
-/**
- * Overriding methods for Mysql specific queries
- */
-
-public class MysqlQuerySetAmbariDB extends QuerySetAmbariDB {
-
-  @Override
-  protected String getSqlMaxDSidFromTableId(int id) {
-    return "select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";";
-  }
-
-  @Override
-  protected String getTableIdSqlFromInstanceName() {
-    return "select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name=?;";
-  }
-
-  @Override
-  protected String getSqlInsertHiveHistory(int id) {
-    return "INSERT INTO DS_JOBIMPL_" + id + " values (?,'','','','','default',?,0,'','',?,'admin',?,'','job','','','Unknown',?,'','Worksheet');";
-  }
-
-  @Override
-  protected String getRevSql(int id, String maxcount) {
-    return "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount + "';";
-  }
-
-}


[02/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc
deleted file mode 100644
index 08096ef..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.jshintrc
+++ /dev/null
@@ -1,32 +0,0 @@
-{
-  "predef": [
-    "document",
-    "window",
-    "-Promise"
-  ],
-  "browser": true,
-  "boss": true,
-  "curly": true,
-  "debug": false,
-  "devel": true,
-  "eqeqeq": true,
-  "evil": true,
-  "forin": false,
-  "immed": false,
-  "laxbreak": false,
-  "newcap": true,
-  "noarg": true,
-  "noempty": false,
-  "nonew": false,
-  "nomen": false,
-  "onevar": false,
-  "plusplus": false,
-  "regexp": false,
-  "undef": true,
-  "sub": true,
-  "strict": false,
-  "white": false,
-  "eqnull": true,
-  "esnext": true,
-  "unused": true
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml
deleted file mode 100644
index 64533be..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.travis.yml
+++ /dev/null
@@ -1,22 +0,0 @@
----
-language: node_js
-node_js:
-  - "4"
-
-sudo: false
-
-cache:
-  directories:
-    - node_modules
-
-before_install:
-  - npm config set spin false
-  - npm install -g bower
-  - npm install phantomjs-prebuilt
-
-install:
-  - npm install
-  - bower install
-
-script:
-  - npm test

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig
deleted file mode 100644
index e7834e3..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/.watchmanconfig
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "ignore_dirs": ["tmp", "dist"]
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md
deleted file mode 100644
index 1d1a14f..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/README.md
+++ /dev/null
@@ -1,67 +0,0 @@
-<!---
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-# Hueambarimigration-view
-
-This README outlines the details of collaborating on this Ember application.
-A short introduction of this app could easily go here.
-
-## Prerequisites
-
-You will need the following things properly installed on your computer.
-
-* [Git](http://git-scm.com/)
-* [Node.js](http://nodejs.org/) (with NPM)
-* [Bower](http://bower.io/)
-* [Ember CLI](http://ember-cli.com/)
-* [PhantomJS](http://phantomjs.org/)
-
-## Installation
-
-* `git clone <repository-url>` this repository
-* change into the new directory
-* `npm install`
-* `bower install`
-
-## Running / Development
-
-* `ember server`
-* Visit your app at [http://localhost:4200](http://localhost:4200).
-
-### Code Generators
-
-Make use of the many generators for code, try `ember help generate` for more details
-
-### Running Tests
-
-* `ember test`
-* `ember test --server`
-
-### Building
-
-* `ember build` (development)
-* `ember build --environment production` (production)
-
-### Deploying
-
-Specify what it takes to deploy your app.
-
-## Further Reading / Useful Links
-
-* [ember.js](http://emberjs.com/)
-* [ember-cli](http://ember-cli.com/)
-* Development Browser Extensions
-  * [ember inspector for chrome](https://chrome.google.com/webstore/detail/ember-inspector/bmdblncegkenkacieihfhpjfppoconhi)
-  * [ember inspector for firefox](https://addons.mozilla.org/en-US/firefox/addon/ember-inspector/)
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js
deleted file mode 100644
index d096f68..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/adapters/application.js
+++ /dev/null
@@ -1,47 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-import DS from 'ember-data';
-import Ember from 'ember';
-
-export default DS.RESTAdapter.extend({
-  namespace: Ember.computed(function() {
-    var parts = window.location.pathname.match(/\/[^\/]*/g);
-    var view = parts[1];
-    var version = '/versions' + parts[2];
-    var instance = parts[3];
-    if (parts.length === 4) { // version is not present
-      instance = parts[2];
-      version = '';
-    }
-    return 'api/v1/views' + view + version + '/instances' + instance + '/resources';
-   //return "";
-  }),
-
-   handleResponse: function(status, headers, payload){
-      if(status === 500 && payload){
-        return payload;
-      }
-      return this._super(...arguments);
-    },
-
-  headers: {
-    'X-Requested-By': 'ambari',
-    'Authorization': 'Basic YWRtaW46YWRtaW4='
-  }
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/app.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/app.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/app.js
deleted file mode 100644
index 4028e22..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/app.js
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-import Resolver from './resolver';
-import loadInitializers from 'ember-load-initializers';
-import config from './config/environment';
-
-let App;
-
-Ember.MODEL_FACTORY_INJECTIONS = true;
-
-App = Ember.Application.extend({
-  modulePrefix: config.modulePrefix,
-  podModulePrefix: config.podModulePrefix,
-  Resolver
-});
-
-loadInitializers(App, config.modulePrefix);
-
-
-
-
-export default App;

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/components/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/components/.gitkeep b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/components/.gitkeep
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/controllers/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/controllers/.gitkeep b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/controllers/.gitkeep
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/helpers/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/helpers/.gitkeep b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/helpers/.gitkeep
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/index.html
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/index.html b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/index.html
deleted file mode 100644
index 620f4a2..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/index.html
+++ /dev/null
@@ -1,40 +0,0 @@
-<!---
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements.  See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License.  You may obtain a copy of the License at [http://www.apache.org/licenses/LICENSE-2.0](http://www.apache.org/licenses/LICENSE-2.0)
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
--->
-<!DOCTYPE html>
-<html>
-  <head>
-    <meta charset="utf-8">
-    <meta http-equiv="X-UA-Compatible" content="IE=edge">
-    <title>HueambarimigrationView</title>
-    <meta name="description" content="">
-    <meta name="viewport" content="width=device-width, initial-scale=1">
-
-    {{content-for "head"}}
-
-    <link rel="stylesheet" href="assets/vendor.css">
-    <link rel="stylesheet" href="assets/hueambarimigration-view.css">
-
-
-    {{content-for "head-footer"}}
-  </head>
-  <body>
-    {{content-for "body"}}
-
-    <script src="assets/vendor.js"></script>
-    <script src="assets/hueambarimigration-view.js"></script>
-
-    {{content-for "body-footer"}}
-  </body>
-</html>

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/.gitkeep b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/.gitkeep
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/allinstancedetail.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/allinstancedetail.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/allinstancedetail.js
deleted file mode 100644
index 5f02afc..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/allinstancedetail.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  instanceName: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambaridatabase.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambaridatabase.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambaridatabase.js
deleted file mode 100644
index ec458dc..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambaridatabase.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  configStatus: DS.attr('string'),
-  configParameter: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambariwebhdfsurl.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambariwebhdfsurl.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambariwebhdfsurl.js
deleted file mode 100644
index ec458dc..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/ambariwebhdfsurl.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  configStatus: DS.attr('string'),
-  configParameter: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
deleted file mode 100644
index 1f4d2c6..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/checkprogress.js
+++ /dev/null
@@ -1,30 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  progressPercentage: DS.attr('string'),
-  numberOfQueryTransfered: DS.attr('string'),
-  totalNoQuery: DS.attr('string'),
-  intanceName: DS.attr('string'),
-  userNameofhue: DS.attr('string'),
-  totalTimeTaken: DS.attr('string'),
-  jobtype: DS.attr('string'),
-  isNoQuerySelected: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/getmigrationresultid.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/getmigrationresultid.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/getmigrationresultid.js
deleted file mode 100644
index 5d55773..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/getmigrationresultid.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  idforJob: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/hiveinstancedetail.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/hiveinstancedetail.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/hiveinstancedetail.js
deleted file mode 100644
index 5f02afc..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/hiveinstancedetail.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  instanceName: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huedatabase.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huedatabase.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huedatabase.js
deleted file mode 100644
index 620f806..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huedatabase.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  configStatus: DS.attr('string'),
-  configParameter: DS.attr('string')
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huehttpurl.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huehttpurl.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huehttpurl.js
deleted file mode 100644
index 620f806..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huehttpurl.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  configStatus: DS.attr('string'),
-  configParameter: DS.attr('string')
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huewebhdfsurl.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huewebhdfsurl.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huewebhdfsurl.js
deleted file mode 100644
index 620f806..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/huewebhdfsurl.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  configStatus: DS.attr('string'),
-  configParameter: DS.attr('string')
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/piginstancedetail.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/piginstancedetail.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/piginstancedetail.js
deleted file mode 100644
index 5f02afc..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/piginstancedetail.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  instanceName: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
deleted file mode 100644
index 5d55773..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobid.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  idforJob: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobidforrevertchange.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobidforrevertchange.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobidforrevertchange.js
deleted file mode 100644
index 5d55773..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/returnjobidforrevertchange.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  idforJob: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
deleted file mode 100644
index 5913726..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startmigration.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  progressPercentage: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startrevertchange.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startrevertchange.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startrevertchange.js
deleted file mode 100644
index 5913726..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/startrevertchange.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  progressPercentage: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/usersdetail.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/usersdetail.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/usersdetail.js
deleted file mode 100644
index 0313a16..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/models/usersdetail.js
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Model from 'ember-data/model';
-import DS from 'ember-data';
-
-export default Model.extend({
-  username: DS.attr('string')
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/resolver.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/resolver.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/resolver.js
deleted file mode 100644
index 884fa9b..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/resolver.js
+++ /dev/null
@@ -1,20 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Resolver from 'ember-resolver';
-
-export default Resolver;

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/router.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/router.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/router.js
deleted file mode 100644
index 6d99750..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/router.js
+++ /dev/null
@@ -1,38 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-import config from './config/environment';
-
-const Router = Ember.Router.extend({
-  location: config.locationType
-});
-
-Router.map(function() {
-  this.route('check-configuration');
-  this.route('homePage', function() {
-    this.route('hive-history');
-    this.route('hive-saved-query');
-    this.route('pig-script');
-    this.route('pig-job');
-    this.route('revert-change');
-  });
-
-
-});
-
-export default Router;

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/.gitkeep
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/.gitkeep b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/.gitkeep
deleted file mode 100644
index e69de29..0000000

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/check-configuration.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/check-configuration.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/check-configuration.js
deleted file mode 100644
index 8e0f28d..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/check-configuration.js
+++ /dev/null
@@ -1,215 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-
-export default Ember.Route.extend({
-
-  actions: {
-    toggleStackTrace: function() {
-      var value = this.controller.get('isExpanded');
-      this.controller.set('isExpanded', !value);
-    }
-  },
-
-  isExpanded: false,
-
-  setupController: function() {
-    var progresBar = 0;
-    var passtonext = 0;
-    var self = this;
-    var control = this.controller;
-    this.controller.set('progresBar', progresBar);
-    var huehttpurl = this.store.queryRecord('huehttpurl', {});
-    var huewebhdfsurl = this.store.queryRecord('huewebhdfsurl', {});
-    var ambariwebhdfs = this.store.queryRecord('ambariwebhdfsurl', {});
-    var huedatabases = this.store.queryRecord('huedatabase', {});
-    var ambaridatabases = this.store.queryRecord('ambaridatabase', {});
-
-    huehttpurl.then(function() {
-        passtonext = passtonext + 20;
-        progresBar = progresBar + 20;
-        control.set('progresBar', progresBar);
-        control.set('huehttpurlTestDone', "set");
-
-        if (huehttpurl.get('configStatus') === "Success") {
-          control.set('huehttpurlTest', "Success");
-          control.set('huehttpurlTestresult', "Hue Http URl test Successful");
-        }
-
-        if (passtonext === 100) {
-          Ember.run.later(this, function() {
-            self.transitionTo('homePage');
-          }, 4000);
-        }
-      },
-      function(error) {
-        progresBar = progresBar + 20;
-        var checkFailedMessage;
-        control.set('progresBar', progresBar);
-        control.set('huehttpurlTestresult', "Hue Http URl test Failed");
-        if (error.status !== 200) {
-          checkFailedMessage = "Service Hue Database check failed";
-          var errors;
-          errors = checkFailedMessage;
-          errors += (error.message) ? (': <i>' + error.message + '</i><br>') : '<br>';
-
-          control.set("errors", errors);
-        }
-        if (error.trace != null) {
-          var stackTrace;
-          stackTrace = checkFailedMessage + ':\n' + error.trace;
-          control.set("stackTrace", stackTrace);
-        }
-      });
-    huewebhdfsurl.then(function() {
-        passtonext = passtonext + 20;
-        progresBar = progresBar + 20;
-        control.set('huewebhdfsurlTestDone', progresBar);
-
-        if (huewebhdfsurl.get('configStatus') === "Success") {
-          control.set('huewebhdfsurlTest', "Success");
-          control.set('huewebhdfsurlTestresult', "Hue Webhdfs url test Successful");
-        }
-        if (passtonext === 100) {
-          Ember.run.later(this, function() {
-            self.transitionTo('homePage');
-          }, 4000);
-        }
-      },
-      function(error) {
-        progresBar = progresBar + 20;
-        var checkFailedMessage;
-        control.set('progresBar', progresBar);
-        if (error.status !== 200) {
-          checkFailedMessage = "Service Hue Database check failed";
-          var errors;
-          errors = checkFailedMessage;
-          errors += (error.message) ? (': <i>' + error.message + '</i><br>') : '<br>';
-          control.set("errors", errors);
-        }
-        if (error.trace != null) {
-          var stackTrace;
-          stackTrace = checkFailedMessage + ':\n' + error.trace;
-          control.set("stackTrace", stackTrace);
-        }
-      });
-    ambariwebhdfs.then(function() {
-        progresBar = progresBar + 20;
-        passtonext = passtonext + 20;
-        control.set('progresBar', progresBar);
-        control.set('ambariwebhdfsTestDone', progresBar);
-
-        if (ambariwebhdfs.get('configStatus') === "Success") {
-          control.set('ambariwebhdfsTest', "Success");
-          control.set('ambariwebhdfsTestresult', "Ambari Webhdfs url test Successful");
-        }
-        if (passtonext === 100) {
-          Ember.run.later(this, function() {
-            self.transitionTo('homePage');
-          }, 4000);
-        }
-      },
-      function(error) {
-        progresBar = progresBar + 20;
-        var checkFailedMessage;
-        control.set('progresBar', progresBar);
-        if (error.status !== 200) {
-          checkFailedMessage = "Service Hue Database check failed";
-          var errors;
-          errors = checkFailedMessage;
-          errors += (error.message) ? (': <i>' + error.message + '</i><br>') : '<br>';
-          control.set("errors", errors);
-        }
-        if (error.trace != null) {
-          var stackTrace;
-          stackTrace = checkFailedMessage + ':\n' + error.trace;
-          control.set("stackTrace", stackTrace);
-        }
-      });
-    huedatabases.then(function() {
-        passtonext = passtonext + 20;
-        progresBar = progresBar + 20;
-        control.set('progresBar', progresBar);
-        control.set('huedatabasesTestDone', progresBar);
-
-        if (huedatabases.get('configStatus') === "Success") {
-          control.set('huedatabasesTest', "Success");
-          control.set('huedatabasesTestresult', "Hue database Connection test Successful");
-        }
-        if (passtonext === 100) {
-          Ember.run.later(this, function() {
-            self.transitionTo('homePage');
-          }, 4000);
-        }
-      },
-      function(error) {
-        control.set('huedatabasesTest', "Success");
-        control.set('huedatabasesTest', null);
-        progresBar = progresBar + 20;
-        var checkFailedMessage;
-        control.set('progresBar', progresBar);
-        control.set('huedatabasesTestresult', "Hue database Connection test Failed");
-        if (error.status !== 200) {
-          checkFailedMessage = "Service Hue Database check failed";
-          var errors;
-          errors = checkFailedMessage;
-          errors += (error.message) ? (': <i>' + error.message + '</i><br>') : '<br>';
-          control.set("errors", errors);
-        }
-        if (error.trace !== null) {
-          var stackTrace;
-          stackTrace = checkFailedMessage + ':\n' + error.trace;
-          control.set("stackTrace", stackTrace);
-        }
-      });
-    ambaridatabases.then(function() {
-        passtonext = passtonext + 20;
-        progresBar = progresBar + 20;
-        control.set('progresBar', progresBar);
-        control.set('ambaridatabasesTestDone', progresBar);
-        if (ambaridatabases.get('configStatus') === "Success") {
-          control.set('ambaridatabasesTest', "Success");
-          control.set('ambaridatabasesTestresult', "Ambari database Connection test Successful");
-        }
-        if (passtonext === 100) {
-          Ember.run.later(this, function() {
-            self.transitionTo('homePage');
-          }, 4000);
-        }
-      },
-      function(error) {
-        progresBar = progresBar + 20;
-        var checkFailedMessage;
-        control.set('progresBar', progresBar);
-        if (error.status !== 200) {
-          checkFailedMessage = "Service Hue Database check failed";
-          var errors;
-          errors = checkFailedMessage;
-          errors += (error.message) ? (': <i>' + error.message + '</i><br>') : '<br>';
-          control.set("errors", errors);
-        }
-
-        if (error.trace !== null) {
-          var stackTrace;
-          stackTrace = checkFailedMessage + ':\n' + error.trace;
-          control.set("stackTrace", stackTrace);
-        }
-      });
-  }
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page.js
deleted file mode 100644
index ff14e60..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page.js
+++ /dev/null
@@ -1,21 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-export default Ember.Route.extend({
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
deleted file mode 100644
index a47b93f..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-history.js
+++ /dev/null
@@ -1,106 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-export default Ember.Route.extend({
-
-  model: function() {
-    var store = this.store;
-    return Ember.RSVP.hash({
-      usersdetail: store.findAll('usersdetail'),
-      hiveinstancedetail: store.findAll('hiveinstancedetail')
-
-    });
-
-  },
-
-  actions: {
-    submitResult: function() {
-      this.controller.set('jobstatus', null);
-      this.controller.set('progressBar', null);
-      this.controller.set('completionStatus', null);
-      var migration = this.store.queryRecord('returnjobid', {
-        username: this.controller.get('usernamehue'),
-        instance: this.controller.get('instancename'),
-        startdate: this.controller.get('startdate'),
-        enddate: this.controller.get('enddate'),
-        jobtype: "hivehistoryquerymigration"
-      });
-      var control = this.controller;
-      var store = this.store;
-      var repeat = this;
-
-      migration.then(function() {
-        var jobid = migration.get('idforJob');
-        console.log("jobid  is   " + jobid);
-        var hivehistoryqueryjobstart = store.queryRecord('startmigration', {
-          username: control.get('usernamehue'),
-          instance: control.get('instancename'),
-          startdate: control.get('startdate'),
-          enddate: control.get('enddate'),
-          jobid: jobid,
-          jobtype: "hivehistoryquerymigration"
-        });
-        hivehistoryqueryjobstart.then(function() {
-          control.set('jobstatus', "0");
-          repeat.progresscheck(jobid);
-        });
-      });
-    }
-  },
-  progresscheck: function(jobid) {
-    var repeat = this;
-    var control = this.controller;
-
-    Ember.run.later(this, function() {
-      var progress = this.store.queryRecord('checkprogress', {
-        jobid: jobid
-      });
-      progress.then(function() {
-        // control.set('jobstatus',null);
-        var progressPercentage = progress.get('progressPercentage');
-        var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
-        var totalNoQuery = progress.get('totalNoQuery');
-        var intanceName = progress.get('intanceName');
-        var userNameofhue = progress.get('userNameofhue');
-        var totalTimeTaken = progress.get('totalTimeTaken');
-        var isNoQuerySelected = progress.get('isNoQuerySelected');
-
-        if (progressPercentage !== '100' && isNoQuerySelected === 'no') {
-          control.set('progressBar', progressPercentage);
-          repeat.progresscheck(jobid);
-        }
-        if (progressPercentage === '100' || isNoQuerySelected === 'yes') {
-          control.set('jobstatus', null);
-          control.set('completionStatus', progressPercentage);
-          control.set('progressBar', progressPercentage);
-          if (numberOfQueryTransfered === "0") {
-            control.set('numberOfQueryTransfered', "No Queries selected according to your criteria");
-          } else {
-            control.set('numberOfQueryTransfered', numberOfQueryTransfered);
-          }
-          control.set('totalNoQuery', totalNoQuery);
-          control.set('instanceName', intanceName);
-          control.set('Username', userNameofhue);
-          control.set('totalTimeTaken', totalTimeTaken);
-
-        }
-      });
-    }, 500);
-  }
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
deleted file mode 100644
index d50a402..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/hive-saved-query.js
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-export default Ember.Route.extend({
-
-  model: function() {
-    var store = this.store;
-    return Ember.RSVP.hash({
-      usersdetail: store.findAll('usersdetail'),
-      hiveinstancedetail: store.findAll('hiveinstancedetail')
-
-    });
-
-  },
-
-  actions: {
-    submitResult: function() {
-      this.controller.set('jobstatus', null);
-      this.controller.set('progressBar', null);
-      this.controller.set('completionStatus', null);
-      var migration = this.store.queryRecord('returnjobid', {
-        username: this.controller.get('usernamehue'),
-        instance: this.controller.get('instancename'),
-        startdate: this.controller.get('startdate'),
-        enddate: this.controller.get('enddate'),
-        jobtype: "hivesavedquerymigration"
-      });
-      var control = this.controller;
-      var store = this.store;
-      var repeat = this;
-
-      migration.then(function() {
-        var jobid = migration.get('idforJob');
-        var hivehistoryqueryjobstart = store.queryRecord('startmigration', {
-          username: control.get('usernamehue'),
-          instance: control.get('instancename'),
-          startdate: control.get('startdate'),
-          enddate: control.get('enddate'),
-          jobid: jobid,
-          jobtype: "hivesavedquerymigration"
-        });
-        hivehistoryqueryjobstart.then(function() {
-          control.set('jobstatus', "0");
-          repeat.progresscheck(jobid);
-        });
-      });
-    }
-  },
-  progresscheck: function(jobid) {
-    var repeat = this;
-    var control = this.controller;
-    Ember.run.later(this, function() {
-      var progress = this.store.queryRecord('checkprogress', {
-        jobid: jobid
-      });
-      progress.then(function() {
-        var progressPercentage = progress.get('progressPercentage');
-        var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
-        var totalNoQuery = progress.get('totalNoQuery');
-        var intanceName = progress.get('intanceName');
-        var userNameofhue = progress.get('userNameofhue');
-        var totalTimeTaken = progress.get('totalTimeTaken');
-        var isNoQuerySelected = progress.get('isNoQuerySelected');
-        if (progressPercentage !== '100' && isNoQuerySelected === 'no') {
-          control.set('progressBar', progressPercentage);
-          repeat.progresscheck(jobid);
-        }
-        if (progressPercentage === '100' || isNoQuerySelected === 'yes') {
-          control.set('jobstatus', null);
-          control.set('completionStatus', progressPercentage);
-          control.set('progressBar', progressPercentage);
-          if (numberOfQueryTransfered === "0") {
-            control.set('numberOfQueryTransfered', "No Queries selected according to your criteria");
-          } else {
-            control.set('numberOfQueryTransfered', numberOfQueryTransfered);
-          }
-          control.set('totalNoQuery', totalNoQuery);
-          control.set('instanceName', intanceName);
-          control.set('Username', userNameofhue);
-          control.set('totalTimeTaken', totalTimeTaken);
-
-        }
-      });
-    }, 500);
-  }
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
deleted file mode 100644
index debb1eb..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-job.js
+++ /dev/null
@@ -1,102 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-export default Ember.Route.extend({
-
-  model: function() {
-    var store = this.store;
-    return Ember.RSVP.hash({
-      usersdetail: store.findAll('usersdetail'),
-      piginstancedetail: store.findAll('piginstancedetail')
-
-    });
-
-  },
-
-  actions: {
-    submitResult: function() {
-      this.controller.set('jobstatus', null);
-      this.controller.set('progressBar', null);
-      this.controller.set('completionStatus', null);
-      var migration = this.store.queryRecord('returnjobid', {
-        username: this.controller.get('usernamehue'),
-        instance: this.controller.get('instancename'),
-        startdate: this.controller.get('startdate'),
-        enddate: this.controller.get('enddate'),
-        jobtype: "pigjobmigration"
-      });
-      var control = this.controller;
-      var store = this.store;
-      var repeat = this;
-
-      migration.then(function() {
-        var jobid = migration.get('idforJob');
-        var hivehistoryqueryjobstart = store.queryRecord('startmigration', {
-          username: control.get('usernamehue'),
-          instance: control.get('instancename'),
-          startdate: control.get('startdate'),
-          enddate: control.get('enddate'),
-          jobid: jobid,
-          jobtype: "pigjobmigration"
-        });
-        hivehistoryqueryjobstart.then(function() {
-          control.set('jobstatus', "0");
-          repeat.progresscheck(jobid);
-        });
-      });
-    }
-  },
-  progresscheck: function(jobid) {
-    var repeat = this;
-    var control = this.controller;
-    Ember.run.later(this, function() {
-      var progress = this.store.queryRecord('checkprogress', {
-        jobid: jobid
-      });
-      progress.then(function() {
-        var progressPercentage = progress.get('progressPercentage');
-        var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
-        var totalNoQuery = progress.get('totalNoQuery');
-        var intanceName = progress.get('intanceName');
-        var userNameofhue = progress.get('userNameofhue');
-        var totalTimeTaken = progress.get('totalTimeTaken');
-        var isNoQuerySelected = progress.get('isNoQuerySelected');
-        if (progressPercentage !== '100' && isNoQuerySelected === 'no') {
-          control.set('progressBar', progressPercentage);
-          repeat.progresscheck(jobid);
-        }
-        if (progressPercentage === '100' || isNoQuerySelected === 'yes') {
-          control.set('jobstatus', null);
-          control.set('completionStatus', progressPercentage);
-          control.set('progressBar', progressPercentage);
-          if (numberOfQueryTransfered === "0") {
-            control.set('numberOfQueryTransfered', "No Queries selected according to your criteria");
-          } else {
-            control.set('numberOfQueryTransfered', numberOfQueryTransfered);
-          }
-          control.set('totalNoQuery', totalNoQuery);
-          control.set('instanceName', intanceName);
-          control.set('Username', userNameofhue);
-          control.set('totalTimeTaken', totalTimeTaken);
-
-        }
-      });
-    }, 500);
-  }
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
deleted file mode 100644
index ab0b919..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/pig-script.js
+++ /dev/null
@@ -1,103 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-export default Ember.Route.extend({
-
-  model: function() {
-    var store = this.store;
-    return Ember.RSVP.hash({
-      usersdetail: store.findAll('usersdetail'),
-      piginstancedetail: store.findAll('piginstancedetail')
-
-    });
-
-  },
-
-  actions: {
-    submitResult: function() {
-      this.controller.set('jobstatus', null);
-      this.controller.set('progressBar', null);
-      this.controller.set('completionStatus', null);
-      var migration = this.store.queryRecord('returnjobid', {
-        username: this.controller.get('usernamehue'),
-        instance: this.controller.get('instancename'),
-        startdate: this.controller.get('startdate'),
-        enddate: this.controller.get('enddate'),
-        jobtype: "pigsavedscriptmigration"
-      });
-      var control = this.controller;
-      var store = this.store;
-      var repeat = this;
-
-      migration.then(function() {
-        var jobid = migration.get('idforJob');
-        var hivehistoryqueryjobstart = store.queryRecord('startmigration', {
-          username: control.get('usernamehue'),
-          instance: control.get('instancename'),
-          startdate: control.get('startdate'),
-          enddate: control.get('enddate'),
-          jobid: jobid,
-          jobtype: "pigsavedscriptmigration"
-        });
-        hivehistoryqueryjobstart.then(function() {
-          control.set('jobstatus', "0");
-          repeat.progresscheck(jobid);
-        });
-      });
-    }
-  },
-  progresscheck: function(jobid) {
-    var repeat = this;
-    var control = this.controller;
-    Ember.run.later(this, function() {
-      var progress = this.store.queryRecord('checkprogress', {
-        jobid: jobid
-      });
-      progress.then(function() {
-        // control.set('jobstatus',null);
-        var progressPercentage = progress.get('progressPercentage');
-        var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
-        var totalNoQuery = progress.get('totalNoQuery');
-        var intanceName = progress.get('intanceName');
-        var userNameofhue = progress.get('userNameofhue');
-        var totalTimeTaken = progress.get('totalTimeTaken');
-        var isNoQuerySelected = progress.get('isNoQuerySelected');
-        if (progressPercentage !== '100' && isNoQuerySelected === 'no') {
-          control.set('progressBar', progressPercentage);
-          repeat.progresscheck(jobid);
-        }
-        if (progressPercentage === '100' || isNoQuerySelected === 'yes') {
-          control.set('jobstatus', null);
-          control.set('completionStatus', progressPercentage);
-          control.set('progressBar', progressPercentage);
-          if (numberOfQueryTransfered === "0") {
-            control.set('numberOfQueryTransfered', "No Queries selected according to your criteria");
-          } else {
-            control.set('numberOfQueryTransfered', numberOfQueryTransfered);
-          }
-          control.set('totalNoQuery', totalNoQuery);
-          control.set('instanceName', intanceName);
-          control.set('Username', userNameofhue);
-          control.set('totalTimeTaken', totalTimeTaken);
-
-        }
-      });
-    }, 500);
-  }
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
deleted file mode 100644
index b34b5ed..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/home-page/revert-change.js
+++ /dev/null
@@ -1,97 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-
-export default Ember.Route.extend({
-
-  model: function() {
-    var store = this.store;
-    return Ember.RSVP.hash({
-      allinstancedetail: store.findAll('allinstancedetail')
-    });
-  },
-
-  actions: {
-    submitResult: function() {
-      this.controller.set('jobstatus', null);
-      this.controller.set('progressBar', null);
-      this.controller.set('completionStatus', null);
-      var migration = this.store.queryRecord('returnjobidforrevertchange', {
-        instance: this.controller.get('instancename'),
-        revertdate: this.controller.get('revertdate')
-      });
-      var control = this.controller;
-      var store = this.store;
-      var repeat = this;
-      migration.then(function() {
-        var jobid = migration.get('idforJob');
-        var hivehistoryqueryjobstart = store.queryRecord('startrevertchange', {
-          instance: control.get('instancename'),
-          revertdate: control.get('revertdate'),
-          jobid: jobid
-        });
-        hivehistoryqueryjobstart.then(function() {  
-          control.set('jobstatus', "0");
-          repeat.progresscheck(jobid);
-        });
-      });
-    }
-  },
-
-  progresscheck: function(jobid) {
-
-    var repeat = this;
-    var control = this.controller;
-    Ember.run.later(this, function() {
-      var progress = this.store.queryRecord('checkprogress', {
-        jobid: jobid
-      });
-      progress.then(function() {
-
-        var progressPercentage = progress.get('progressPercentage');
-        var numberOfQueryTransfered = progress.get('numberOfQueryTransfered');
-        var totalNoQuery = progress.get('totalNoQuery');
-        var intanceName = progress.get('intanceName');
-        var userNameofhue = progress.get('userNameofhue');
-        var totalTimeTaken = progress.get('totalTimeTaken');
-        //var jobtype=progress.get('jobtype');
-        var isNoQuerySelected = progress.get('isNoQuerySelected');
-        if (progressPercentage !== '100' && isNoQuerySelected === 'no') {
-          control.set('progressBar', progressPercentage);
-          repeat.progresscheck(jobid);
-        }
-        if (progressPercentage === '100' || isNoQuerySelected === 'yes') {
-          control.set('jobstatus', null);
-          control.set('completionStatus', progressPercentage);
-          control.set('progressBar', progressPercentage);
-          if (numberOfQueryTransfered === "0") {
-            control.set('numberOfQueryTransfered', "No Queries selected according to your criteria");
-          } else {
-            control.set('numberOfQueryTransfered', numberOfQueryTransfered);
-          }
-          control.set('totalNoQuery', totalNoQuery);
-          control.set('instanceName', intanceName);
-          control.set('Username', userNameofhue);
-          control.set('totalTimeTaken', totalTimeTaken);
-        }
-      });
-    }, 500);
-  }
-
-});
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/index.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/index.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/index.js
deleted file mode 100644
index 80f2600..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/routes/index.js
+++ /dev/null
@@ -1,24 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import Ember from 'ember';
-
-export default Ember.Route.extend({
-  beforeModel() {
-    this.transitionTo('check-configuration');
-  }
-});

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/serializers/application.js
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/serializers/application.js b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/serializers/application.js
deleted file mode 100644
index 6e1291b..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/serializers/application.js
+++ /dev/null
@@ -1,19 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-import DS from 'ember-data';
-export default DS.RESTSerializer.extend({});

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.css
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.css b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.css
deleted file mode 100644
index 3196113..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.css
+++ /dev/null
@@ -1,25 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-body{
-padding-top:10px;
-}
-
-nav.active
-{
-font-weight: bold;
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.scss
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.scss b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.scss
deleted file mode 100644
index dbea46e..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/styles/app.scss
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-@import "ember-power-select";

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/application.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/application.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/application.hbs
deleted file mode 100644
index 4e484d3..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/application.hbs
+++ /dev/null
@@ -1,24 +0,0 @@
-{{!
-   Licensed to the Apache Software Foundation (ASF) under one
-   or more contributor license agreements.  See the NOTICE file
-   distributed with this work for additional information
-   regarding copyright ownership.  The ASF licenses this file
-   to you under the Apache License, Version 2.0 (the
-   "License"); you may not use this file except in compliance
-   with the License.  You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-}}
-<div class="container">
-  {{outlet}}
-
-</div>
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/check-configuration.hbs
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/check-configuration.hbs b/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/check-configuration.hbs
deleted file mode 100644
index ad666be..0000000
--- a/contrib/views/hueambarimigration/src/main/resources/ui/hueambarimigration-view/app/templates/check-configuration.hbs
+++ /dev/null
@@ -1,153 +0,0 @@
-{{!
-Licensed to the Apache Software Foundation (ASF) under one
-or more contributor license agreements.  See the NOTICE file
-distributed with this work for additional information
-regarding copyright ownership.  The ASF licenses this file
-to you under the Apache License, Version 2.0 (the
-"License"); you may not use this file except in compliance
-with the License.  You may obtain a copy of the License at
-http://www.apache.org/licenses/LICENSE-2.0
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-}}
-<div class="container-fluid">
-  <h1>Welcome to HueMigration View
-  </h1>
-  <h2>Please wait...
-  </h2>
-  {{#if errors}}
-    <div class="progress progress-striped active">
-      <div id="progressbarhivesavedquery" class="progress-bar progress-bar-danger" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progresBar}}%">
-      </div>
-    </div>
-  {{else}}
-    <div class="progress progress-striped active">
-      <div id="progressbarhivesavedquery" class="progress-bar progress-bar-success" role="progressbar" aria-valuenow="50" aria-valuemin="0" aria-valuemax="50"  style="width:{{progresBar}}%">
-      </div>
-    </div>
-  {{/if}}
-  <table class="table">
-    <tbody>
-    <tr>
-      <td>
-        {{#if huehttpurlTestDone}}
-          {{#if huehttpurlTest}}
-            <span class="glyphicon glyphicon-ok green">
-          </span>
-          {{else}}
-            <span class="glyphicon glyphicon-remove red">
-          </span>
-          {{/if}}
-        {{else}}
-          <span class="glyphicon glyphicon-arrow-right">
-          </span>
-        {{/if}}
-      </td>
-      <td>
-        Hue Http Test
-      </td>
-    </tr>
-    <tr>
-      <td>
-        {{#if huewebhdfsurlTestDone}}
-          {{#if huewebhdfsurlTest}}
-            <span class="glyphicon glyphicon-ok green">
-          </span>
-          {{else}}
-            <span class="glyphicon glyphicon-remove red">
-          </span>
-          {{/if}}
-        {{else}}
-          <span class="glyphicon glyphicon-arrow-right">
-          </span>
-        {{/if}}
-      </td>
-      <td>
-        Hue Webhdfs Test
-      </td>
-    </tr>
-    <tr>
-      <td>
-        {{#if ambariwebhdfsTestDone}}
-          {{#if ambariwebhdfsTest}}
-            <span class="glyphicon glyphicon-ok green">
-          </span>
-          {{else}}
-            <span class="glyphicon glyphicon-remove red">
-          </span>
-          {{/if}}
-        {{else}}
-          <span class="glyphicon glyphicon-arrow-right">
-          </span>
-        {{/if}}
-      </td>
-      <td>
-        Ambari Webhdfs Test
-      </td>
-    </tr>
-    <tr>
-      <td>
-        {{#if huedatabasesTestDone}}
-          {{#if huedatabasesTest}}
-            <span class="glyphicon glyphicon-ok green">
-          </span>
-          {{else}}
-            <span class="glyphicon glyphicon-remove red">
-          </span>
-          {{/if}}
-        {{else}}
-          <span class="glyphicon glyphicon-arrow-right">
-          </span>
-        {{/if}}
-      </td>
-      <td>
-        Hue Database Test
-      </td>
-    </tr>
-    <tr>
-      <td>
-        {{#if ambaridatabasesTestDone}}
-          {{#if ambaridatabasesTest}}
-            <span class="glyphicon glyphicon-ok green">
-          </span>
-          {{else}}
-            <span class="glyphicon glyphicon-remove red">
-          </span>
-          {{/if}}
-        {{else}}
-          <span class="glyphicon glyphicon-arrow-right">
-          </span>
-        {{/if}}
-      </td>
-      <td>
-        Ambari Database Test
-      </td>
-    </tr>
-    </tbody>
-  </table>
-  {{#if errors}}
-    <h3>Issues detected
-    </h3>
-    <p>{{{errors}}}
-    </p>
-  {{/if}}
-  {{#if stackTrace}}
-    <a href="#" {{action "toggleStackTrace" post}}>
-      {{#if isExpanded}}
-        <span class="glyphicon glyphicon-collapse-down">
-    </span> Collapse Stack Trace
-      {{else}}
-        <span class="glyphicon glyphicon-expand">
-    </span> Expand Stack Trace
-      {{/if}}
-    </a>
-    {{#if isExpanded}}
-      <pre class="prettyprint">
-        {{stackTrace}}
-      </pre>
-    {{/if}}
-  {{/if}}
-</div>


[04/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java
deleted file mode 100644
index d7f2868..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/InstanceModel.java
+++ /dev/null
@@ -1,41 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-
-public class InstanceModel {
-
-     String instanceName;
-     int id;
-
-     public String getInstanceName() {
-          return instanceName;
-     }
-
-     public void setInstanceName(String instanceName) {
-          this.instanceName = instanceName;
-     }
-
-     public int getId() {
-          return id;
-     }
-
-     public void setId(int id) {
-          this.id = id;
-     }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java
deleted file mode 100644
index 1a247bb..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/JobReturnIdModel.java
+++ /dev/null
@@ -1,43 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-/**
- * Created by temp on 5/19/16.
- */
-public class JobReturnIdModel {
-
-  int id;
-  String idforJob;
-
-  public String getIdforJob() {
-    return idforJob;
-  }
-
-  public void setIdforJob(String idforJob) {
-    this.idforJob = idforJob;
-  }
-
-  public int getId() {
-    return id;
-  }
-
-  public void setId(int id) {
-    this.id = id;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
deleted file mode 100644
index f765e15..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationModel.java
+++ /dev/null
@@ -1,130 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.PersonalResource;
-import org.apache.commons.beanutils.BeanUtils;
-
-import java.io.Serializable;
-import java.lang.reflect.InvocationTargetException;
-import java.util.Map;
-
-public class MigrationModel implements Serializable,PersonalResource{
-
-
-
-  private String id;
-
-  private int numberOfQueryTransfered;
-  private String intanceName="";
-  private String userNameofhue="";
-  private int totalNoQuery;
-  private String ProgressPercentage="";
-  private String owner = "";
-  private Boolean IfSuccess;
-  private String TimeTakentotransfer="";
-
-  public String getTimeTakentotransfer() {
-    return TimeTakentotransfer;
-  }
-
-  public void setTimeTakentotransfer(String timeTakentotransfer) {
-    TimeTakentotransfer = timeTakentotransfer;
-  }
-
-  public Boolean getIfSuccess() {
-    return IfSuccess;
-  }
-
-  public void setIfSuccess(Boolean ifSuccess) {
-    IfSuccess = ifSuccess;
-  }
-
-  public MigrationModel(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
-    BeanUtils.populate(this, stringObjectMap);
-  }
-
-  public MigrationModel() {
-
-  }
-
-
-  public String getId() {
-    return id;
-  }
-
-  public void setId(String id) {
-    this.id = id;
-  }
-
-  @Override
-  public int hashCode() {
-    return id.hashCode();
-  }
-
-
-
-
-  public String getIntanceName() {
-    return intanceName;
-  }
-
-  public void setIntanceName(String intanceName) {
-    this.intanceName = intanceName;
-  }
-
-  public String getUserNameofhue() {
-    return userNameofhue;
-  }
-
-  public void setUserNameofhue(String userNameofhue) {
-    this.userNameofhue = userNameofhue;
-  }
-
-  public int getNumberOfQueryTransfered() {
-    return numberOfQueryTransfered;
-  }
-
-  public void setNumberOfQueryTransfered(int numberOfQueryTransfered) {
-    this.numberOfQueryTransfered = numberOfQueryTransfered;
-  }
-
-  public int getTotalNoQuery() {
-    return totalNoQuery;
-  }
-
-  public void setTotalNoQuery(int totalNoQuery) {
-    this.totalNoQuery = totalNoQuery;
-  }
-
-  public String getProgressPercentage() {
-    return ProgressPercentage;
-  }
-
-  public void setProgressPercentage(String progressPercentage) {
-    ProgressPercentage = progressPercentage;
-  }
-
-  public String getOwner() {
-    return owner;
-  }
-
-  public void setOwner(String owner) {
-    this.owner = owner;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
deleted file mode 100644
index d3cd5b9..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/MigrationResponse.java
+++ /dev/null
@@ -1,143 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-import org.apache.commons.beanutils.BeanUtils;
-
-import java.lang.reflect.InvocationTargetException;
-import java.util.Map;
-
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.PersonalResource;
-import org.apache.commons.beanutils.BeanUtils;
-
-import java.io.Serializable;
-import java.lang.reflect.InvocationTargetException;
-import java.util.Date;
-import java.util.Map;
-
-public class MigrationResponse implements Serializable,PersonalResource{
-
-
-
-  private String id;
-
-  private int numberOfQueryTransfered;
-  private String intanceName="";
-  private String userNameofhue="";
-  private int totalNoQuery;
-  private int ProgressPercentage;
-  private String owner = "";
-  private String totalTimeTaken="";
-  private String jobtype="";
-  private String isNoQuerySelected="";
-
-  public String getTotalTimeTaken() {
-    return totalTimeTaken;
-  }
-
-  public void setTotalTimeTaken(String totalTimeTaken) {
-    this.totalTimeTaken = totalTimeTaken;
-  }
-
-  public String getIsNoQuerySelected() {
-    return isNoQuerySelected;
-  }
-
-  public void setIsNoQuerySelected(String isNoQuerySelected) {
-    this.isNoQuerySelected = isNoQuerySelected;
-  }
-
-  public String getJobtype() {
-    return jobtype;
-  }
-
-  public void setJobtype(String jobtype) {
-    this.jobtype = jobtype;
-  }
-
-  public MigrationResponse(Map<String, Object> stringObjectMap) throws InvocationTargetException, IllegalAccessException {
-    BeanUtils.populate(this, stringObjectMap);
-  }
-
-  public MigrationResponse() {
-
-  }
-
-
-  public int getTotalNoQuery() {
-    return totalNoQuery;
-  }
-
-  public void setTotalNoQuery(int totalNoQuery) {
-    this.totalNoQuery = totalNoQuery;
-  }
-
-  public String getId() {
-    return id;
-  }
-
-  public void setId(String id) {
-    this.id = id;
-  }
-
-  @Override
-  public int hashCode() {
-    return id.hashCode();
-  }
-
-  public int getNumberOfQueryTransfered() {
-    return numberOfQueryTransfered;
-  }
-
-  public void setNumberOfQueryTransfered(int numberOfQueryTransfered) {
-    this.numberOfQueryTransfered = numberOfQueryTransfered;
-  }
-
-  public String getIntanceName() {
-    return intanceName;
-  }
-
-  public void setIntanceName(String intanceName) {
-    this.intanceName = intanceName;
-  }
-
-  public String getUserNameofhue() {
-    return userNameofhue;
-  }
-
-  public void setUserNameofhue(String userNameofhue) {
-    this.userNameofhue = userNameofhue;
-  }
-
-
-  public int getProgressPercentage() {
-    return ProgressPercentage;
-  }
-
-  public void setProgressPercentage(int progressPercentage) {
-    ProgressPercentage = progressPercentage;
-  }
-
-  public String getOwner() {
-    return owner;
-  }
-
-  public void setOwner(String owner) {
-    this.owner = owner;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java
deleted file mode 100644
index 5bf6499..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/PigModel.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-import java.util.Date;
-
-public class PigModel {
-
-  private Date dt;
-  private String script;
-  private String Status;
-  private String title;
-  private String dir;
-
-  public Date getDt() {
-    return dt;
-  }
-
-  public void setDt(Date dt) {
-    this.dt = dt;
-  }
-
-  public String getScript() {
-    return script;
-  }
-
-  public void setScript(String script) {
-    this.script = script;
-  }
-
-  public String getStatus() {
-    return Status;
-  }
-
-  public void setStatus(String status) {
-    Status = status;
-  }
-
-  public String getTitle() {
-    return title;
-  }
-
-  public void setTitle(String title) {
-    this.title = title;
-  }
-
-  public String getDir() {
-    return dir;
-  }
-
-  public void setDir(String dir) {
-    this.dir = dir;
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java
deleted file mode 100644
index 0c66e17..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/ProgressCheckModel.java
+++ /dev/null
@@ -1,60 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-
-public class ProgressCheckModel {
-
-  private int progressPercentage;
-  private int noOfQueryCompleted;
-  private int noOfQueryLeft;
-  private int totalNoOfQuery;
-
-  public int getProgressPercentage() {
-    return progressPercentage;
-  }
-
-  public void setProgressPercentage(int progressPercentage) {
-    this.progressPercentage = progressPercentage;
-  }
-
-  public int getNoOfQueryCompleted() {
-    return noOfQueryCompleted;
-  }
-
-  public void setNoOfQueryCompleted(int noOfQueryCompleted) {
-    this.noOfQueryCompleted = noOfQueryCompleted;
-  }
-
-  public int getNoOfQueryLeft() {
-    return noOfQueryLeft;
-  }
-
-  public void setNoOfQueryLeft(int noOfQueryLeft) {
-    this.noOfQueryLeft = noOfQueryLeft;
-  }
-
-  public int getTotalNoOfQuery() {
-    return totalNoOfQuery;
-  }
-
-  public void setTotalNoOfQuery(int totalNoOfQuery) {
-    this.totalNoOfQuery = totalNoOfQuery;
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java
deleted file mode 100644
index 6ba651c..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/resources/scripts/models/UserModel.java
+++ /dev/null
@@ -1,48 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.resources.scripts.models;
-
-
-
-public class UserModel {
-
-  String username;
-  int id;
-
-  public String getUsername() {
-    return username;
-  }
-
-  public void setUsername(String username) {
-    this.username = username;
-  }
-
-  public int getId() {
-    return id;
-  }
-
-  public void setId(int id) {
-    this.id = id;
-  }
-}
-
-
-
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java
new file mode 100644
index 0000000..ac76e1c
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/configurationcheck/ConfFileReader.java
@@ -0,0 +1,199 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.configurationcheck;
+
+import java.io.BufferedReader;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.*;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.util.Properties;
+import javax.ws.rs.core.Context;
+
+import org.apache.ambari.view.ViewContext;
+import org.apache.ambari.view.AmbariStreamProvider;
+import org.apache.ambari.view.URLStreamProvider;
+
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
+import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
+import org.apache.log4j.Logger;
+
+public class ConfFileReader {
+
+  static final Logger logger = Logger.getLogger(ConfFileReader.class);
+
+  private static String homeDir = System.getProperty("java.io.tmpdir")+"/";
+
+  public static boolean checkConfigurationForHue(String hueURL) {
+
+    URL url = null;
+    int resonseCode = 0;
+    try {
+      url = new URL(hueURL);
+      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("GET");  //OR  huc.setRequestMethod ("HEAD");
+      connection.connect();
+      resonseCode = connection.getResponseCode();
+
+
+    } catch (MalformedURLException e) {
+
+      logger.error("Error in accessing the URL:" , e);
+
+    } catch (ProtocolException e) {
+
+      logger.error("Error in protocol: ", e);
+    } catch (IOException e) {
+
+      logger.error("IO Exception while establishing connection:",e);
+    }
+
+    return resonseCode == 200 ;
+  }
+
+  public static boolean checkConfigurationForAmbari(String ambariURL) {
+
+
+    URL url = null;
+    int responseCode = 0;
+    try {
+      url = new URL(ambariURL);
+      HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+      connection.setRequestMethod("GET");  //OR  huc.setRequestMethod ("HEAD");
+      connection.connect();
+      responseCode = connection.getResponseCode();
+
+    } catch (MalformedURLException e) {
+      logger.error("Error in accessing the URL: " , e);
+
+    } catch (ProtocolException e) {
+      logger.error("Error in protocol: ", e);
+    } catch (IOException e) {
+      logger.error("IO Exception while establishing connection: ",e);
+    }
+    return responseCode == 200 ;
+
+
+  }
+
+  public static boolean checkHueDatabaseConnection(String hueDBDRiver, String hueJdbcUrl, String huedbUsername, String huedbPassword) throws IOException {
+
+    try {
+      Connection con = DataSourceHueDatabase.getInstance(hueDBDRiver, hueJdbcUrl, huedbUsername, huedbPassword).getConnection();
+    }
+    catch (Exception e) {
+
+      logger.error("Sql exception in acessing Hue Database: " ,e);
+      return false;
+    }
+
+    return true;
+
+  }
+
+  public static boolean checkAmbariDatbaseConection(String ambariDBDriver, String ambariDBJdbcUrl, String ambariDbUsername, String ambariDbPassword) throws IOException {
+
+
+    try {
+
+      Connection con = DataSourceAmbariDatabase.getInstance(ambariDBDriver, ambariDBJdbcUrl, ambariDbUsername, ambariDbPassword).getConnection();
+
+
+    } catch (Exception e) {
+
+      logger.error("Sql exception in acessing Ambari Database: " ,e);
+
+      return false;
+    }
+
+    return true;
+
+  }
+
+  public static String getHomeDir() {
+    return homeDir;
+  }
+
+  public static void setHomeDir(String homeDir) {
+    ConfFileReader.homeDir = homeDir;
+  }
+
+  public static boolean checkNamenodeURIConnectionforambari(String ambariServerNameNode) throws IOException, URISyntaxException {
+
+
+    Configuration conf = new Configuration();
+    conf.set("fs.hdfs.impl",
+      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName());
+    conf.set("fs.file.impl",
+      org.apache.hadoop.fs.LocalFileSystem.class.getName()
+    );
+
+    FileSystem fileSystem = FileSystem.get(new URI(ambariServerNameNode), conf);
+
+
+    if (fileSystem instanceof WebHdfsFileSystem) {
+
+      return true;
+
+    } else {
+
+      return false;
+    }
+
+
+  }
+
+  public static boolean checkNamenodeURIConnectionforHue(String hueServerNamenodeURI) throws IOException, URISyntaxException {
+
+    Configuration conf = new Configuration();
+    conf.set("fs.hdfs.impl",
+      org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+    );
+    conf.set("fs.file.impl",
+      org.apache.hadoop.fs.LocalFileSystem.class.getName()
+    );
+
+    FileSystem fileSystem = FileSystem.get(new URI(hueServerNamenodeURI), conf);
+
+
+    if (fileSystem instanceof WebHdfsFileSystem) {
+
+      return true;
+    } else {
+
+      return false;
+    }
+
+
+  }
+
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java
new file mode 100644
index 0000000..c959e8a
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveHistoryQueryImpl.java
@@ -0,0 +1,562 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.hive;
+
+import java.net.URISyntaxException;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedWriter;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+
+public class HiveHistoryQueryImpl {
+
+  static final Logger logger = Logger.getLogger(HiveHistoryQueryImpl.class);
+
+  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
+
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+
+    XMLOutputter xmlOutput = new XMLOutputter();
+    xmlOutput.setFormat(Format.getPrettyFormat());
+
+    File xmlfile = new File("/var/lib/huetoambari/RevertChange.xml");
+
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+        Element rootNode = doc.getRootElement();
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (JDOMException e) {
+        logger.error("JDOMException" ,e);
+
+      }
+
+    } else {
+
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate.toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+        doc.getRootElement().addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+      } catch (IOException io) {
+        logger.error("JDOMException" , io);
+      }
+
+    }
+
+  }
+
+  public int fetchMaximumIdfromAmbaridb(String driverName, Connection c, int id) throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+    return num;
+  }
+
+  public void insertRowinAmbaridb(String driverName, String dirname, int maxcount, long epochtime, Connection c, int id, String instance, int i) throws SQLException, IOException {
+
+    String maxcount1 = Integer.toString(maxcount);
+    String epochtime1 = Long.toString(epochtime);
+    String ds_id = new String();
+    Statement stmt = null;
+    String sql = "";
+    String revsql = "";
+    stmt = c.createStatement();
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet')";
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='" + maxcount1 + "'";
+
+    }
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+
+    stmt.executeUpdate(sql);
+
+  }
+
+  public int fetchInstanceTablename(String driverName, Connection c, String instance) throws SQLException {
+
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
+    }
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+    }
+    return id;
+  }
+
+  public long getEpochTime() throws ParseException {
+    long seconds = System.currentTimeMillis() / 1000l;
+    return seconds;
+
+  }
+
+  public String getTime() throws ParseException {
+    int day, month, year;
+    int second, minute, hour;
+    int milisecond;
+    GregorianCalendar date = new GregorianCalendar();
+
+    day = date.get(Calendar.DAY_OF_MONTH);
+    month = date.get(Calendar.MONTH);
+    year = date.get(Calendar.YEAR);
+
+    second = date.get(Calendar.SECOND);
+    minute = date.get(Calendar.MINUTE);
+    hour = date.get(Calendar.HOUR);
+    milisecond = date.get(Calendar.MILLISECOND);
+
+    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
+      + minute;
+    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
+      + minute + "-" + second + "-" + milisecond;
+    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
+    Date date1 = df.parse(s1);
+    long epoch = date1.getTime();
+    return s;
+
+  }
+
+  public String[] fetchFromHue(String username, String startdate, String endtime, Connection connection) throws ClassNotFoundException, SQLException {
+    int id = 0;
+    int i = 0;
+    String[] query = new String[100];
+
+    try {
+      connection.setAutoCommit(false);
+      Statement statement = connection.createStatement();
+
+      ResultSet rs1 = null;
+      if (username.equals("all")) {
+      } else {
+        ResultSet rs = statement.executeQuery("select id from auth_user where username='" + username + "';");
+        while (rs.next()) {
+          id = rs.getInt("id");
+        }
+      }
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory;");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + ";");
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "') AND submission_date < date('" + endtime + "');");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "') AND submission_date <= date('" + endtime + "');");
+        }
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date >= date('" + startdate + "');");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date >= date('" + startdate + "');");
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where submission_date < date('" + endtime + "');");
+        } else {
+          rs1 = statement.executeQuery("select query from beeswax_queryhistory where owner_id =" + id + " AND submission_date < date('" + endtime + "');");
+        }
+      }
+
+
+      while (rs1.next()) {
+        query[i] = rs1.getString("query");
+        i++;
+      }
+
+      connection.commit();
+
+    } catch (SQLException e) {
+      connection.rollback();
+
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("Sql exception error: " + e);
+      }
+    }
+    return query;
+
+  }
+
+  public void writetoFileQueryhql(String content, String homedir) {
+    try {
+      File file = new File(homedir + "query.hql");
+      // if file doesnt exists, then create it
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+    } catch (IOException e) {
+      logger.error("IOException" , e);
+    }
+
+  }
+
+  public void deleteFileQueryhql(String homedir) {
+    try{
+      File file = new File(homedir + "query.hql");
+
+      if(file.delete()){
+        logger.info("temporary hql file deleted");
+      }else{
+        logger.info("temporary hql file delete failed");
+      }
+
+    }catch(Exception e){
+
+     logger.error("File Exception ",e);
+
+    }
+
+  }
+
+  public void deleteFileQueryLogs(String homedir) {
+    try{
+      File file = new File(homedir + "logs");
+
+      if(file.delete()){
+        logger.info("temporary logs file deleted");
+      }else{
+        logger.info("temporary logs file delete failed");
+      }
+
+    }catch(Exception e){
+
+      logger.error("File Exception ",e);
+
+    }
+
+  }
+
+  public void writetoFileLogs(String homedir) {
+    try {
+      String content = "";
+      File file = new File(homedir + "logs");
+      // if file doesnt exists, then create it
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+    } catch (IOException e) {
+      logger.error("IOException" , e);
+    }
+
+  }
+
+  public void createDir(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      UserGroupInformation.setConfiguration(conf);
+
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+
+        public Boolean run() throws Exception {
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          Boolean b = fs.mkdirs(src);
+          return b;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Exception in Webhdfs" , e);
+    }
+  }
+
+  public void createDirKerberorisedSecured(final String dir, final String namenodeuri) throws IOException,
+    URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      ugi.doAs(new PrivilegedExceptionAction<Boolean>() {
+
+        public Boolean run() throws Exception {
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          Boolean b = fs.mkdirs(src);
+          return b;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Exception in Webhdfs" , e);
+    }
+  }
+
+
+  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          Configuration conf = new Configuration();
+          conf.set("fs.defaultFS", namenodeuri);
+          conf.set("hadoop.job.ugi", "hdfs");
+          conf.set("fs.hdfs.impl",
+            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+          );
+          conf.set("fs.file.impl",
+            org.apache.hadoop.fs.LocalFileSystem.class.getName()
+          );
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          if (fileSystem.exists(path)) {
+
+          }
+          //	Path pathsource = new Path(source);
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+    }
+
+  }
+
+  public void putFileinHdfsKerborizedSecured(final String source, final String dest, final String namenodeuri)
+    throws IOException {
+
+    try {
+
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          if (fileSystem.exists(path)) {
+
+          }
+
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+
+    }
+
+  }
+
+}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java
new file mode 100644
index 0000000..3ad481d
--- /dev/null
+++ b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/service/hive/HiveSavedQueryImpl.java
@@ -0,0 +1,778 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ * <p/>
+ * http://www.apache.org/licenses/LICENSE-2.0
+ * <p/>
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.ambari.view.huetoambarimigration.service.hive;
+
+import java.nio.charset.Charset;
+import java.security.PrivilegedExceptionAction;
+import java.sql.Connection;
+import java.sql.DriverManager;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.text.ParseException;
+import java.text.SimpleDateFormat;
+import java.io.BufferedInputStream;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.ByteArrayInputStream;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileWriter;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.GregorianCalendar;
+import java.util.Scanner;
+import java.io.*;
+import java.net.URISyntaxException;
+import java.net.URL;
+
+import org.apache.ambari.view.huetoambarimigration.service.configurationcheck.ConfFileReader;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
+import org.apache.log4j.Logger;
+import org.jdom.Attribute;
+import org.jdom.Document;
+import org.jdom.Element;
+import org.jdom.JDOMException;
+import org.jdom.input.SAXBuilder;
+import org.jdom.output.Format;
+import org.jdom.output.XMLOutputter;
+import org.json.JSONArray;
+import org.json.JSONObject;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.UserGroupInformation;
+
+import org.apache.ambari.view.huetoambarimigration.model.*;
+
+public class HiveSavedQueryImpl {
+
+  static final Logger logger = Logger.getLogger(HiveSavedQueryImpl.class);
+
+  private static String readAll(Reader rd) throws IOException {
+    StringBuilder sb = new StringBuilder();
+    int cp;
+    while ((cp = rd.read()) != -1) {
+      sb.append((char) cp);
+    }
+    return sb.toString();
+  }
+
+  public void wrtitetoalternatesqlfile(String dirname, String content,
+                                       String instance, int i) throws IOException {
+
+    Date dNow = new Date();
+    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
+    String currentDate = ft.format(dNow);
+
+    XMLOutputter xmlOutput = new XMLOutputter();
+
+    xmlOutput.setFormat(Format.getPrettyFormat());
+
+    File xmlfile = new File(ConfFileReader.getHomeDir() + "RevertChange.xml");
+
+    if (xmlfile.exists()) {
+      String iteration = Integer.toString(i + 1);
+      SAXBuilder builder = new SAXBuilder();
+      Document doc;
+      try {
+        doc = (Document) builder.build(xmlfile);
+
+        Element rootNode = doc.getRootElement();
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate
+          .toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        rootNode.addContent(record);
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (JDOMException e) {
+        // TODO Auto-generated catch block
+        logger.error("JDOMException: " , e);
+      }
+
+    } else {
+
+      try {
+        String iteration = Integer.toString(i + 1);
+        Element revertrecord = new Element("RevertChangePage");
+        Document doc = new Document(revertrecord);
+        doc.setRootElement(revertrecord);
+
+        Element record = new Element("RevertRecord");
+        record.setAttribute(new Attribute("id", iteration));
+        record.addContent(new Element("datetime").setText(currentDate
+          .toString()));
+        record.addContent(new Element("dirname").setText(dirname));
+        record.addContent(new Element("instance").setText(instance));
+        record.addContent(new Element("query").setText(content));
+
+        doc.getRootElement().addContent(record);
+
+        xmlOutput.output(doc, new FileWriter(ConfFileReader.getHomeDir() + "RevertChange.xml"));
+
+      } catch (IOException io) {
+
+      }
+
+    }
+
+  }
+
+  public int fetchMaxidforSavedQueryHive(String driverName, Connection c, int id)
+    throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max(cast(ds_id as unsigned) ) as max from DS_SAVEDQUERY_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_savedquery_" + id + ";");
+    }
+
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+
+    }
+
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+
+    return num;
+  }
+
+  public int fetchInstancetablenameForSavedqueryHive(String driverName, Connection c,
+                                                     String instance) throws SQLException {
+
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("oracle")) {
+      rs = stmt
+        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
+          + instance + "'");
+    } else {
+      rs = stmt
+        .executeQuery("select * from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.savedQueries.SavedQuery' and view_instance_name='"
+          + instance + "';");
+    }
+
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+
+    }
+
+    return id;
+  }
+
+  public int fetchInstanceTablenameHiveHistory(String driverName, Connection c,
+                                               String instance) throws SQLException {
+    String ds_id = new String();
+    int id = 0;
+    Statement stmt = null;
+
+
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "'");
+    } else {
+      rs = stmt.executeQuery("select id from viewentity where class_name LIKE 'org.apache.ambari.view.hive.resources.jobs.viewJobs.JobImpl' and view_instance_name='" + instance + "';");
+    }
+
+
+    while (rs.next()) {
+      id = rs.getInt("id");
+      System.out.println("id is " + id);
+
+    }
+
+    return id;
+
+  }
+
+  public int fetchMaxdsidFromHiveHistory(String driverName, Connection c, int id)
+    throws SQLException {
+
+    String ds_id = null;
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    ResultSet rs = null;
+
+    if (driverName.contains("postgresql")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id + ";");
+    } else if (driverName.contains("mysql")) {
+      rs = stmt.executeQuery("select max( cast(ds_id as unsigned) ) as max from DS_JOBIMPL_" + id + ";");
+    } else if (driverName.contains("oracle")) {
+      rs = stmt.executeQuery("select MAX(cast(ds_id as integer)) as max from ds_jobimpl_" + id);
+    }
+    while (rs.next()) {
+      ds_id = rs.getString("max");
+    }
+    int num;
+    if (ds_id == null) {
+      num = 1;
+    } else {
+      num = Integer.parseInt(ds_id);
+    }
+    return num;
+  }
+
+
+  /**/
+  public void insertRowHiveHistory(String driverName, String dirname, int maxcount,
+                                   long epochtime, Connection c, int id, String instance, int i)
+    throws SQLException, IOException {
+    String maxcount1 = Integer.toString(maxcount);
+
+    String epochtime1 = Long.toString(epochtime);
+
+    String ds_id = new String();
+    Statement stmt = null;
+
+    stmt = c.createStatement();
+    String sql = "";
+    String revsql = "";
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_JOBIMPL_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+
+      revsql = "delete from  DS_JOBIMPL_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet');";
+
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_jobimpl_" + id + " values ('" + maxcount1
+        + "','','','','','default'," + epochtime1 + ",0,'','','"
+        + dirname + "logs','admin','" + dirname
+        + "query.hql','','job','','','Unknown','" + dirname
+        + "','','Worksheet')";
+      revsql = "delete from  ds_jobimpl_" + id + " where ds_id='"
+        + maxcount1 + "'";
+
+    }
+    stmt.executeUpdate(sql);
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+  }
+
+  public void insertRowinSavedQuery(String driverName, int maxcount, String database,
+                                    String dirname, String query, String name, Connection c, int id,
+                                    String instance, int i) throws SQLException, IOException {
+    String maxcount1 = Integer.toString(maxcount);
+
+    String ds_id = new String();
+    Statement stmt = null;
+    String sql = "";
+    String revsql = "";
+    stmt = c.createStatement();
+
+    if (driverName.contains("mysql")) {
+      sql = "INSERT INTO DS_SAVEDQUERY_" + id + " values ('"
+        + maxcount1 + "','" + database + "','" + "admin" + "','"
+        + dirname + "query.hql','" + query + "','" + name + "');";
+
+      revsql = "delete from  DS_SAVEDQUERY_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("postgresql")) {
+      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
+        + maxcount1 + "','" + database + "','" + "admin" + "','"
+        + dirname + "query.hql','" + query + "','" + name + "');";
+
+      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
+        + maxcount1 + "';";
+
+    } else if (driverName.contains("oracle")) {
+      sql = "INSERT INTO ds_savedquery_" + id + " values ('"
+        + maxcount1 + "','" + database + "','" + "admin" + "','"
+        + dirname + "query.hql','" + query + "','" + name + "')";
+
+      revsql = "delete from  ds_savedquery_" + id + " where ds_id='"
+        + maxcount1 + "'";
+
+    }
+    wrtitetoalternatesqlfile(dirname, revsql, instance, i);
+    stmt.executeUpdate(sql);
+  }
+
+  public long getEpochTime() throws ParseException {
+
+    long seconds = System.currentTimeMillis() / 1000l;
+    return seconds;
+
+  }
+
+  public String getTime() throws ParseException {
+    int day, month, year;
+    int second, minute, hour;
+    int milisecond;
+    GregorianCalendar date = new GregorianCalendar();
+
+    day = date.get(Calendar.DAY_OF_MONTH);
+    month = date.get(Calendar.MONTH);
+    year = date.get(Calendar.YEAR);
+
+    second = date.get(Calendar.SECOND);
+    minute = date.get(Calendar.MINUTE);
+    hour = date.get(Calendar.HOUR);
+    milisecond = date.get(Calendar.MILLISECOND);
+
+    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
+      + minute;
+    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-"
+      + minute + "-" + second + "-" + milisecond;
+    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
+    Date date1 = df.parse(s1);
+    long epoch = date1.getTime();
+
+    return s;
+
+  }
+
+  public ArrayList<PojoHive> fetchFromHuedb(String username,
+                                            String startdate, String endtime, Connection connection)
+    throws ClassNotFoundException, IOException {
+    int id = 0;
+    int i = 0;
+    String[] query = new String[100];
+    ArrayList<PojoHive> hiveArrayList = new ArrayList<PojoHive>();
+    ResultSet rs1 = null;
+
+    try {
+      Statement statement = connection.createStatement();
+      if (username.equals("all")) {
+      } else {
+        ResultSet rs = statement
+          .executeQuery("select id from auth_user where username='"
+            + username + "';");
+        while (rs.next()) {
+
+          id = rs.getInt("id");
+
+        }
+
+      }
+      if (startdate.equals("") && endtime.equals("")) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery;");
+
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id + ";");
+        }
+
+      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime >= date('"
+              + startdate
+              + "') AND mtime <= date('"
+              + endtime + "');");
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id
+              + " AND mtime >= date('"
+              + startdate
+              + "') AND mtime <= date('"
+              + endtime
+              + "');");
+        }
+
+      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and  mtime >= date('"
+              + startdate + "');");
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id
+              + " AND mtime >= date('"
+              + startdate
+              + "');");
+        }
+
+      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
+        if (username.equals("all")) {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query' AND mtime <= date('"
+              + endtime + "');");
+        } else {
+          rs1 = statement
+            .executeQuery("select data,name,owner_id from beeswax_savedquery where name!='My saved query'and owner_id ="
+              + id
+              + " AND mtime <= date('"
+              + endtime
+              + "');");
+        }
+
+      }
+      while (rs1.next()) {
+        PojoHive hivepojo = new PojoHive();
+        String name = rs1.getString("name");
+        String temp = rs1.getString("data");
+        InputStream is = new ByteArrayInputStream(temp.getBytes());
+        BufferedReader rd = new BufferedReader(new InputStreamReader(
+          is, Charset.forName("UTF-8")));
+        String jsonText = readAll(rd);
+        JSONObject json = new JSONObject(jsonText);
+        String resources = json.get("query").toString();
+        json = new JSONObject(resources);
+
+        String resarr = (json.get("query")).toString();
+
+        json = new JSONObject(resources);
+        String database = (json.get("database")).toString();
+        hivepojo.setQuery(resarr);
+        hivepojo.setDatabase(database);
+        hivepojo.setOwner(name);
+        hiveArrayList.add(hivepojo);
+        i++;
+      }
+
+    } catch (SQLException e) {
+      // if the error message is "out of memory",
+      // it probably means no database file is found
+      System.err.println(e.getMessage());
+    } finally {
+      try {
+        if (connection != null)
+          connection.close();
+      } catch (SQLException e) {
+        logger.error("sql connection exception" , e);
+      }
+    }
+
+    return hiveArrayList;
+
+  }
+
+
+  public void writetoFilequeryHql(String content, String homedir) {
+    try {
+      File file = new File(homedir + "query.hql");
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+
+    } catch (IOException e) {
+      logger.error("IOException: " , e);
+    }
+
+  }
+
+  public void deleteFileQueryhql(String homedir) {
+    try{
+      File file = new File(homedir + "query.hql");
+
+      if(file.delete()){
+        logger.info("temporary hql file deleted");
+      }else{
+        logger.info("temporary hql file delete failed");
+      }
+
+    }catch(Exception e){
+
+      logger.error("File Exception ",e);
+
+    }
+
+  }
+
+  public void deleteFileQueryLogs(String homedir) {
+    try{
+      File file = new File(homedir + "logs");
+
+      if(file.delete()){
+        logger.info("temporary logs file deleted");
+      }else{
+        logger.info("temporary logs file delete failed");
+      }
+
+    }catch(Exception e){
+
+      logger.error("File Exception ",e);
+
+    }
+
+  }
+
+
+  public void writetoFileLogs(String homedir) {
+    try {
+
+      String content = "";
+      File file = new File(homedir + "logs");
+
+      // if file doesnt exists, then create it
+      if (!file.exists()) {
+        file.createNewFile();
+      }
+
+      FileWriter fw = new FileWriter(file.getAbsoluteFile());
+      BufferedWriter bw = new BufferedWriter(fw);
+      bw.write(content);
+      bw.close();
+
+    } catch (IOException e) {
+      logger.error("IOException: " , e);
+    }
+
+  }
+
+  public void createDirHive(final String dir, final String namenodeuri)
+    throws IOException, URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs: " , e);
+    }
+  }
+
+  public void createDirHiveSecured(final String dir, final String namenodeuri)
+    throws IOException, URISyntaxException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fs = FileSystem.get(conf);
+          Path src = new Path(dir);
+          fs.mkdirs(src);
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs: " , e);
+    }
+  }
+
+  public void putFileinHdfs(final String source, final String dest,
+                            final String namenodeuri) throws IOException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+          FileSystem fileSystem = FileSystem.get(conf);
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          if (fileSystem.exists(path)) {
+
+          }
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+    }
+
+  }
+
+
+  public void putFileinHdfsSecured(final String source, final String dest,
+                                   final String namenodeuri) throws IOException {
+
+    try {
+      final Configuration conf = new Configuration();
+
+      conf.set("fs.hdfs.impl",
+        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
+      );
+      conf.set("fs.file.impl",
+        org.apache.hadoop.fs.LocalFileSystem.class.getName()
+      );
+      conf.set("fs.defaultFS", namenodeuri);
+      conf.set("hadoop.job.ugi", "hdfs");
+      conf.set("hadoop.security.authentication", "Kerberos");
+
+      UserGroupInformation.setConfiguration(conf);
+      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
+      ugi.doAs(new PrivilegedExceptionAction<Void>() {
+
+        public Void run() throws Exception {
+
+
+          FileSystem fileSystem = FileSystem.get(conf);
+
+          String filename = source.substring(
+            source.lastIndexOf('/') + 1, source.length());
+          String dest1;
+          if (dest.charAt(dest.length() - 1) != '/') {
+            dest1 = dest + "/" + filename;
+          } else {
+            dest1 = dest + filename;
+          }
+
+          Path path = new Path(dest1);
+          if (fileSystem.exists(path)) {
+
+          }
+          // Path pathsource = new Path(source);
+          FSDataOutputStream out = fileSystem.create(path);
+
+          InputStream in = new BufferedInputStream(
+            new FileInputStream(new File(source)));
+
+          byte[] b = new byte[1024];
+          int numBytes = 0;
+          while ((numBytes = in.read(b)) > 0) {
+            out.write(b, 0, numBytes);
+          }
+          in.close();
+          out.close();
+          fileSystem.close();
+
+
+          return null;
+        }
+      });
+    } catch (Exception e) {
+      logger.error("Webhdfs exception" , e);
+    }
+
+  }
+
+}


[06/11] ambari git commit: Revert "AMBARI-17079. Moved Hue to Ambari migrator to standard view architecture (Pradarttana Panda via dipayanb)"

Posted by al...@apache.org.
http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
deleted file mode 100644
index 9f9e053..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryMigrationUtility.java
+++ /dev/null
@@ -1,281 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.HiveModel;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationModel;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.MysqlQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.OracleQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.PostgressQuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.hive.savedqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.hive.savedqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.log4j.Logger;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-public class HiveSavedQueryMigrationUtility {
-
-
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  public MigrationModel hiveSavedQueryMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(HiveSavedQueryMigrationUtility.class);
-
-    Connection connectionAmbaridb = null;
-    Connection connectionHuedb = null;
-
-    int i = 0;
-
-    logger.info("-------------------------------------");
-    logger.info("hive saved query Migration started");
-    logger.info("-------------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + instance);
-    logger.info("hue username is : " + username);
-
-    HiveSavedQueryMigrationImplementation hivesavedqueryimpl = new HiveSavedQueryMigrationImplementation();/* creating Implementation object  */
-
-    QuerySet huedatabase=null;
-
-    if(view.getProperties().get("huedrivername").contains("mysql"))
-    {
-      huedatabase=new MysqlQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("postgresql"))
-    {
-      huedatabase=new PostgressQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("sqlite"))
-    {
-     huedatabase=new SqliteQuerySet();
-    }
-    else if (view.getProperties().get("huedrivername").contains("oracle"))
-    {
-      huedatabase=new OracleQuerySet();
-    }
-
-
-    QuerySetAmbariDB ambaridatabase=null;
-
-
-    if(view.getProperties().get("ambaridrivername").contains("mysql"))
-    {
-      ambaridatabase=new MysqlQuerySetAmbariDB();
-    }
-    else if(view.getProperties().get("ambaridrivername").contains("postgresql"))
-    {
-      ambaridatabase=new PostgressQuerySetAmbariDB();
-    }
-    else if (view.getProperties().get("ambaridrivername").contains("oracle"))
-    {
-      ambaridatabase= new OracleQuerySetAmbariDB();
-    }
-
-    int maxcountForHivehistroryAmbaridb, maxCountforSavequeryAmbaridb;
-    String time = null;
-    Long epochtime = null;
-    String dirNameforHiveSavedquery;
-    ArrayList<HiveModel> dbpojoHiveSavedQuery = new ArrayList<HiveModel>();
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection(); /* fetching connection to hue DB */
-
-      dbpojoHiveSavedQuery = hivesavedqueryimpl.fetchFromHuedb(username, startDate, endDate, connectionHuedb,huedatabase); /* fetching data from hue db and storing it in to a model */
-
-
-      for(int j=0;j<dbpojoHiveSavedQuery.size();j++)
-      {
-        logger.info("the query fetched from hue"+dbpojoHiveSavedQuery.get(j).getQuery());
-
-      }
-
-
-      if (dbpojoHiveSavedQuery.size() == 0) /* if no data has been fetched from hue db according to search criteria */ {
-
-        migrationresult.setIsNoQuerySelected("yes");
-        migrationresult.setProgressPercentage(0);
-        migrationresult.setNumberOfQueryTransfered(0);
-        migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
-        getResourceManager(view).update(migrationresult, jobid);
-        logger.info("No queries has been selected acccording to your criteria");
-
-        logger.info("no hive saved query has been selected from hue according to your criteria of searching");
-
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();/* connecting to ambari DB */
-        connectionAmbaridb.setAutoCommit(false);
-
-        for (i = 0; i < dbpojoHiveSavedQuery.size(); i++) {
-
-          logger.info("_____________________");
-          logger.info("Loop No." + (i + 1));
-          logger.info("_____________________");
-
-          float calc = ((float) (i + 1)) / dbpojoHiveSavedQuery.size() * 100;
-          int progressPercentage = Math.round(calc);
-
-          migrationresult.setIsNoQuerySelected("no");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i+1);
-          migrationresult.setTotalNoQuery(dbpojoHiveSavedQuery.size());
-          getResourceManager(view).update(migrationresult, jobid);
-
-
-
-
-          logger.info("query fetched from hue:-  " + dbpojoHiveSavedQuery.get(i).getQuery());
-
-          int tableIdSavedQuery = hivesavedqueryimpl.fetchInstancetablenameForSavedqueryHive(connectionAmbaridb, instance,ambaridatabase); /* fetching the instance table name for migration saved query  from the given instance name */
-
-          int tableIdHistoryHive = hivesavedqueryimpl.fetchInstanceTablenameHiveHistory(connectionAmbaridb, instance,ambaridatabase); /* fetching the instance table name for migration history query from the given instance name */
-
-          logger.info("Table name are fetched from instance name.");
-
-          hivesavedqueryimpl.writetoFilequeryHql(dbpojoHiveSavedQuery.get(i).getQuery(), ConfigurationCheckImplementation.getHomeDir()); /* writing migration query to a local file*/
-
-          hivesavedqueryimpl.writetoFileLogs(ConfigurationCheckImplementation.getHomeDir());/* writing logs to localfile */
-
-          logger.info(".hql and logs file are saved in temporary directory");
-
-          maxcountForHivehistroryAmbaridb = (hivesavedqueryimpl.fetchMaxdsidFromHiveHistory( connectionAmbaridb, tableIdHistoryHive,ambaridatabase) + 1);/* fetching the maximum ds_id from migration history table*/
-
-          maxCountforSavequeryAmbaridb = (hivesavedqueryimpl.fetchMaxidforSavedQueryHive(connectionAmbaridb, tableIdSavedQuery,ambaridatabase) + 1);/* fetching the maximum ds_id from migration saved query table*/
-
-          time = hivesavedqueryimpl.getTime();/* getting system time */
-
-          epochtime = hivesavedqueryimpl.getEpochTime();/* getting epoch time */
-
-          dirNameforHiveSavedquery = "/user/admin/migration/jobs/migration-job-" + maxcountForHivehistroryAmbaridb + "-"
-            + time + "/"; // creating hdfs directory name
-
-          logger.info("Directory will be creted in HDFS" + dirNameforHiveSavedquery);
-
-          hivesavedqueryimpl.insertRowHiveHistory(dirNameforHiveSavedquery,maxcountForHivehistroryAmbaridb,epochtime,connectionAmbaridb,tableIdHistoryHive,instance,i,ambaridatabase);// inserting to migration history table
-
-          logger.info("Row inserted in hive History table.");
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            logger.info("Kerberose Enabled");
-            hivesavedqueryimpl.createDirHiveSecured(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs in kerborized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs in kerberoroized cluster
-            hivesavedqueryimpl.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs in kerberoroized cluster
-
-          } else {
-
-            logger.info("Kerberose Not Enabled");
-            hivesavedqueryimpl.createDirHive(dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// creating directory in hdfs
-            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "query.hql", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting .hql file in hdfs directory
-            hivesavedqueryimpl.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + "logs", dirNameforHiveSavedquery, view.getProperties().get("namenode_URI_Ambari"));// putting logs file in hdfs
-          }
-
-          //inserting into hived saved query table
-          //6.
-          hivesavedqueryimpl.insertRowinSavedQuery(maxCountforSavequeryAmbaridb, dbpojoHiveSavedQuery.get(i).getDatabase(), dirNameforHiveSavedquery, dbpojoHiveSavedQuery.get(i).getQuery(), dbpojoHiveSavedQuery.get(i).getOwner(), connectionAmbaridb, tableIdSavedQuery, instance, i,ambaridatabase);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-
-      logger.error("SQL exception: ", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("roll back done");
-      } catch (SQLException e1) {
-        logger.error("Rollback error: ", e1);
-
-      }
-    } catch (ClassNotFoundException e1) {
-      logger.error("Class not found : " , e1);
-    } catch (ParseException e) {
-      logger.error("ParseException: " , e);
-    } catch (URISyntaxException e) {
-      logger.error("URISyntaxException: " , e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException:" , e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("Error in connection close", e);
-        }
-    }
-
-
-    hivesavedqueryimpl.deleteFileQueryhql(ConfigurationCheckImplementation.getHomeDir());
-    hivesavedqueryimpl.deleteFileQueryLogs(ConfigurationCheckImplementation.getHomeDir());
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-    MigrationModel model=new MigrationModel();
-
-    migrationresult.setJobtype("hivesavedquerymigration");
-    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-    getResourceManager(view).update(migrationresult, jobid);
-
-
-
-    logger.info("-------------------------------");
-    logger.info("hive saved query Migration end");
-    logger.info("--------------------------------");
-
-    return model;
-
-  }
-}
-
-
-
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
deleted file mode 100644
index 0445132..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/hive/savedquery/HiveSavedQueryStartJob.java
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.hive.savedquery;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class HiveSavedQueryStartJob extends Thread{
-
-  String username;
-  String instance;
-  String startdate;
-  String enddate;
-  String jobid;
-  ViewContext view;
-
-  public HiveSavedQueryStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
-    this.username = username;
-    this.instance=instance;
-    this.startdate=startdate;
-    this.enddate=enddate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-
-    JSONObject response = new JSONObject();
-
-    /**
-     * creating a separate thread
-     */
-
-    HiveSavedQueryMigrationUtility hivesavedquery=new HiveSavedQueryMigrationUtility();
-    try {
-      hivesavedquery.hiveSavedQueryMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
deleted file mode 100644
index 64e7069..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationImplementation.java
+++ /dev/null
@@ -1,532 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-import java.security.PrivilegedExceptionAction;
-import java.sql.*;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.io.File;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-import java.io.*;
-import java.net.URISyntaxException;
-;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.QuerySet;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataInputStream;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-
-/**
- * Implementation helpers for migrating Hue pig jobs into Ambari: fetching
- * job rows from the Hue database, inserting rows into the Ambari database,
- * recording revert SQL into RevertChangesService.xml, and copying job files
- * between HDFS namespaces, with plain and Kerberos variants of the HDFS
- * operations.
- */
-public class PigJobMigrationImplementation {
-
-  static final Logger logger = Logger.getLogger(PigJobMigrationImplementation.class);
-
-  /** Reads the given reader to exhaustion and returns its contents as a String. */
-  private static String readAll(Reader rd) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    int cp;
-    while ((cp = rd.read()) != -1) {
-      sb.append((char) cp);
-    }
-    return sb.toString();
-  }
-
-  /**
-   * Appends one RevertRecord (timestamp, directory, instance and revert SQL)
-   * to RevertChangesService.xml under the configured home directory, creating
-   * the file with a RevertChangePage root element when it does not exist yet.
-   *
-   * @param dirname  HDFS directory created for the migrated job
-   * @param content  revert SQL statement to replay when undoing the migration
-   * @param instance instance name the record belongs to
-   * @param i        zero-based record counter; stored as id {@code i + 1}
-   */
-  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
-    Date dNow = new Date();
-    // NOTE(review): "YYYY" is the week-based year pattern; the calendar year
-    // is "yyyy". Around the new year this stamps the wrong year.
-    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
-    String currentDate = ft.format(dNow);
-    XMLOutputter xmlOutput = new XMLOutputter();
-    xmlOutput.setFormat(Format.getPrettyFormat());
-    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
-    if (xmlfile.exists()) {
-      // File already exists: parse it and append a new record to the root.
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-        Element rootNode = doc.getRootElement();
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-      } catch (JDOMException e) {
-
-        logger.error("Jdom Exception: ", e);
-      }
-
-
-    } else {
-      // create the file with a fresh root element and the first record
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-        doc.getRootElement().addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-      } catch (IOException io) {
-        logger.error("Jdom Exception: ", io);
-      }
-
-    }
-
-  }
-
-  /**
-   * Returns the current maximum id (column aliased "max") of the pig-job
-   * table identified by {@code id}, or 1 when the table is empty.
-   * Callers add 1 to this value to obtain the next free id.
-   */
-  public int fetchMaxIdforPigJob(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-
-    String ds_id = null;
-    ResultSet rs = null;
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-
-  }
-
-  /**
-   * Looks up the numeric table id the Ambari database uses for the given
-   * view instance name; returns 0 when no row matches.
-   */
-  public int fetchInstanceTablename(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-
-    String ds_id = new String();  // NOTE(review): unused local
-    int id = 0;
-    Statement stmt = null;        // NOTE(review): unused local
-    PreparedStatement prSt = null;
-
-
-    ResultSet rs = null;
-
-
-    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
-
-    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-  }
-
-  /**
-   * Inserts one migrated pig-job row into the Ambari table identified by
-   * {@code id}, then records the corresponding revert SQL in the revert XML.
-   * The {@code time}/{@code time2} parameters are currently unused here.
-   */
-  public void insertRowPigJob(String dirname, int maxcountforpigjob, String time, String time2, long epochtime, String title, Connection c, int id, String status, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
-
-    String epochtime1 = Long.toString(epochtime);  // NOTE(review): unused local
-    String maxcountforpigjob1 = Integer.toString(maxcountforpigjob);
-    String ds_id = new String();                   // NOTE(review): unused local
-    String revSql;
-
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.insertToPigJob(dirname, maxcountforpigjob1, epochtime, title, c, id, status);
-
-    prSt.executeUpdate();
-
-    revSql = ambaridatabase.revertSql(id, maxcountforpigjob1);
-
-    wrtitetoalternatesqlfile(dirname, revSql, instance, i);
-
-  }
-
-  /**
-   * Returns (approximately) the current time as epoch milliseconds, by
-   * formatting the current calendar fields to a string and parsing it back.
-   */
-  public long getEpochTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-    return epoch;
-
-  }
-
-  /**
-   * Returns the current time formatted as "yyyy-M-d_H-m" (used in directory
-   * names). The epoch computation below is dead code — the parsed value is
-   * never returned.
-   */
-  public String getTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();  // NOTE(review): computed but unused
-    return s;
-
-  }
-
-  /** Returns the current time in the Hue-style "yyyy-MM-dd HH:mm:ss..." text form. */
-  public String getTimeInorder() throws ParseException {
-    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
-    Date now = new Date();
-    String strDate = sdfDate.format(now);
-    return strDate;
-  }
-
-  /**
-   * Fetches pig-job rows from the Hue database for the given user (or "all")
-   * and optional start/end date window, mapping each row's numeric status to
-   * RUNNING/SUCCEEDED/SUBMIT_FAILED/KILLED. The connection is closed in the
-   * finally block, so it cannot be reused by the caller afterwards.
-   */
-  public ArrayList<PigModel> fetchFromHueDB(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
-    int id = 0;
-    int i = 0;
-    String[] query = new String[100];  // NOTE(review): unused local
-    ArrayList<PigModel> pigjobarraylist = new ArrayList<PigModel>();
-    try {
-      connection.setAutoCommit(false);
-      PreparedStatement prSt = null;
-      Statement statement = connection.createStatement();
-      ResultSet rs;
-
-      ResultSet rs1 = null;
-      if (username.equals("all")) {
-      } else {
-        // resolve the Hue numeric user id for the given user name
-        prSt = huedatabase.getUseridfromUserName(connection, username);
-
-        rs = prSt.executeQuery();
-
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-
-      // Four-way dispatch over (startdate present?, endtime present?),
-      // each with an all-users and a single-user variant.
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
-
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
-
-        }
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
-
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
-        }
-
-
-      }
-
-      rs1 = prSt.executeQuery();
-
-
-      while (rs1.next()) {
-        PigModel pigjjobobject = new PigModel();
-
-        // Hue stores run status as an int; map it to the Ambari status string.
-        int runstatus = rs1.getInt("status");
-
-        if (runstatus == 1) {
-          pigjjobobject.setStatus("RUNNING");
-        } else if (runstatus == 2) {
-          pigjjobobject.setStatus("SUCCEEDED");
-        } else if (runstatus == 3) {
-          pigjjobobject.setStatus("SUBMIT_FAILED");
-        } else if (runstatus == 4) {
-          pigjjobobject.setStatus("KILLED");
-        }
-        String title = rs1.getString("script_title");
-
-
-        pigjjobobject.setTitle(title);
-        String dir = rs1.getString("statusdir");
-        pigjjobobject.setDir(dir);
-        Date created_data = rs1.getDate("start_time");
-        pigjjobobject.setDt(created_data);
-
-        pigjobarraylist.add(pigjjobobject);
-
-        i++;
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("Sqlexception: ", e);
-    } finally {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("Sqlexception in closing the connection: ", e);
-
-      }
-    }
-
-    return pigjobarraylist;
-
-  }
-
-  /**
-   * Creates {@code dir} on the HDFS at {@code namenodeuri}, acting as the
-   * "hdfs" remote user (no Kerberos). Failures are logged and swallowed.
-   */
-  public void createDirPigJob(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-          );
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class.getName()
-          );
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-  }
-
-  /**
-   * Kerberos-enabled variant of {@link #createDirPigJob}: sets
-   * "hadoop.security.authentication" to Kerberos before creating the
-   * directory as the "hdfs" remote user.
-   */
-  public void createDirPigJobSecured(final String dir, final String namenodeuri) throws IOException,
-    URISyntaxException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-
-          FileSystem fs = FileSystem.get(conf);
-          Path src = new Path(dir);
-          fs.mkdirs(src);
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-  }
-
-  /**
-   * Copies {@code source} from the Hue HDFS to {@code dest} on the Ambari
-   * HDFS (streaming in 1 KiB chunks) as the "hdfs" remote user.
-   * Failures are logged and swallowed.
-   */
-  public void copyFileBetweenHdfs(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
-    throws IOException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration confAmbari = new Configuration();
-          confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
-          confAmbari.set("hadoop.job.ugi", "hdfs");
-          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
-
-          Configuration confHue = new Configuration();
-          // NOTE(review): this sets the Hue filesystem to the AMBARI namenode
-          // URI; nameNodeuriHue is never used. Looks like a bug — confirm.
-          confHue.set("fs.defaultFS", nameNodeuriAmbari);
-          confHue.set("hadoop.job.ugi", "hdfs");
-          FileSystem fileSystemHue = FileSystem.get(confHue);
-
-          // append the source file name to dest, inserting '/' if needed
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path1 = new Path(source);
-          FSDataInputStream in1 = fileSystemHue.open(path1);
-
-          Path path = new Path(dest1);
-          if (fileSystemAmbari.exists(path)) {
-            // NOTE(review): empty branch — existing files are silently overwritten below
-          }
-
-          FSDataOutputStream out = fileSystemAmbari.create(path);
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in1.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in1.close();
-          out.close();
-          fileSystemAmbari.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-
-  }
-
-  /**
-   * Kerberos-enabled variant of {@link #copyFileBetweenHdfs}: both
-   * configurations set "hadoop.security.authentication" to Kerberos before
-   * the copy is performed as the "hdfs" remote user.
-   */
-  public void copyFileBetweenHdfsSecured(final String source, final String dest, final String nameNodeuriAmbari, final String nameNodeuriHue)
-    throws IOException {
-
-    try {
-
-      final Configuration confAmbari = new Configuration();
-      confAmbari.set("fs.defaultFS", nameNodeuriAmbari);
-      confAmbari.set("hadoop.job.ugi", "hdfs");
-
-      final Configuration confHue = new Configuration();
-      // NOTE(review): same as the unsecured variant — the Hue filesystem is
-      // pointed at the AMBARI namenode URI; nameNodeuriHue is unused. Confirm.
-      confHue.set("fs.defaultFS", nameNodeuriAmbari);
-      confHue.set("hadoop.job.ugi", "hdfs");
-
-      confAmbari.set("hadoop.security.authentication", "Kerberos");
-      confHue.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystemAmbari = FileSystem.get(confAmbari);
-
-          FileSystem fileSystemHue = FileSystem.get(confHue);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path1 = new Path(source);
-          FSDataInputStream in1 = fileSystemHue.open(path1);
-
-          Path path = new Path(dest1);
-          if (fileSystemAmbari.exists(path)) {
-            // NOTE(review): empty branch — existing files are silently overwritten below
-          }
-          FSDataOutputStream out = fileSystemAmbari.create(path);
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in1.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in1.close();
-          out.close();
-          fileSystemAmbari.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs exception: ", e);
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
deleted file mode 100644
index 5d99b49..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobMigrationUtility.java
+++ /dev/null
@@ -1,238 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.net.URISyntaxException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-import org.apache.ambari.view.ViewContext;
-
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.log4j.Logger;
-
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.jobqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.jobqueryset.*;
-
-/**
- * Orchestrates the migration of pig jobs from Hue to Ambari: selects the
- * Hue/Ambari query sets from the configured JDBC driver names, fetches the
- * matching pig jobs, inserts them into the Ambari database, copies the job
- * files on HDFS, and reports progress through a {@link MigrationResponse}.
- */
-public class PigJobMigrationUtility  {
-
-  protected MigrationResourceManager resourceManager = null;
-
-  /** Lazily creates (once) the resource manager used to persist progress updates. */
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-  /**
-   * Runs the full pig-job migration for one user/instance/date-range
-   * selection, updating {@code migrationresult} (keyed by {@code jobid})
-   * after each migrated job. SQL errors trigger a rollback of the Ambari
-   * connection; all errors are logged rather than rethrown.
-   */
-  public void pigJobMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(PigJobMigrationUtility.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    logger.info("------------------------------");
-    logger.info("pig Jobs Migration started");
-    logger.info("------------------------------");
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    // NOTE(review): the next two labels appear swapped — username is logged
-    // as "instance is" and instance as "hue username is". Confirm and fix.
-    logger.info("instance is: " + username);
-    logger.info("hue username is : " + instance);
-
-    PigJobMigrationImplementation pigjobimpl = new PigJobMigrationImplementation();// creating the implementation object
-
-    // Pick the Hue query set matching the configured Hue JDBC driver name.
-    QuerySet huedatabase=null;
-
-    if(view.getProperties().get("huedrivername").contains("mysql"))
-    {
-      huedatabase=new MysqlQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("postgresql"))
-    {
-      huedatabase=new PostgressQuerySet();
-    }
-    else if(view.getProperties().get("huedrivername").contains("sqlite"))
-    {
-      huedatabase=new SqliteQuerySet();
-    }
-    else if (view.getProperties().get("huedrivername").contains("oracle"))
-    {
-      huedatabase=new OracleQuerySet();
-    }
-
-    // Pick the Ambari query set matching the configured Ambari JDBC driver name.
-    QuerySetAmbariDB ambaridatabase=null;
-
-    if(view.getProperties().get("ambaridrivername").contains("mysql"))
-    {
-      ambaridatabase=new MysqlQuerySetAmbariDB();
-    }
-    else if(view.getProperties().get("ambaridrivername").contains("postgresql"))
-    {
-      ambaridatabase=new PostgressQuerySetAmbariDB();
-    }
-    else if (view.getProperties().get("ambaridrivername").contains("oracle"))
-    {
-      ambaridatabase= new OracleQuerySetAmbariDB();
-    }
-    int maxCountforPigScript = 0,i=0;
-
-    String time = null, timeIndorder = null;
-    Long epochtime = null;
-    String pigJobDirName;
-    ArrayList<PigModel> pigJobDbPojo = new ArrayList<PigModel>();
-
-    try {
-
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connecting to hue database
-
-      pigJobDbPojo = pigjobimpl.fetchFromHueDB(username, startDate, endDate, connectionHuedb,huedatabase);// fetching the PigJobs details from hue
-
-      for(int j=0;j<pigJobDbPojo.size();j++)
-      {
-        // NOTE(review): indexes with i (still 0 here) instead of the loop
-        // variable j, so the same element is logged every iteration.
-        logger.info("the query fetched from hue="+pigJobDbPojo.get(i).getScript());
-
-      }
-
-			/* No pig job details were fetched according to the search criteria */
-      if (pigJobDbPojo.size() == 0) {
-
-        migrationresult.setIsNoQuerySelected("yes");
-        migrationresult.setProgressPercentage(0);
-        migrationresult.setNumberOfQueryTransfered(0);
-        migrationresult.setTotalNoQuery(pigJobDbPojo.size());
-        getResourceManager(view).update(migrationresult, jobid);
-        logger.info("no pig Job has been selected from hue according to your criteria of searching");
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();
-        connectionAmbaridb.setAutoCommit(false);
-
-        // Migrate each fetched pig job, publishing progress after every one.
-        for (i = 0; i < pigJobDbPojo.size(); i++) {
-
-          float calc = ((float) (i + 1)) / pigJobDbPojo.size() * 100;
-          int progressPercentage = Math.round(calc);
-          migrationresult.setIsNoQuerySelected("no");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i+1);
-          migrationresult.setTotalNoQuery(pigJobDbPojo.size());
-          getResourceManager(view).update(migrationresult, jobid);
-
-
-
-
-          logger.info("Loop No." + (i + 1));
-          logger.info("________________");
-          logger.info("the title of script " + pigJobDbPojo.get(i).getTitle());
-
-          int fetchPigTablenameInstance = pigjobimpl.fetchInstanceTablename(connectionAmbaridb, instance,ambaridatabase);
-
-          // next free id = current max + 1
-          maxCountforPigScript = (pigjobimpl.fetchMaxIdforPigJob(connectionAmbaridb, fetchPigTablenameInstance,ambaridatabase) + 1);
-
-          time = pigjobimpl.getTime();
-          timeIndorder = pigjobimpl.getTimeInorder();
-          epochtime = pigjobimpl.getEpochTime();
-
-          pigJobDirName = "/user/admin/pig/jobs/" + pigJobDbPojo.get(i).getTitle() + "_" + time + "/";
-
-          pigjobimpl.insertRowPigJob(pigJobDirName, maxCountforPigScript, time, timeIndorder, epochtime, pigJobDbPojo.get(i).getTitle(), connectionAmbaridb, fetchPigTablenameInstance, pigJobDbPojo.get(i).getStatus(), instance, i,ambaridatabase);
-
-          // Copy the job's script and output files, using the Kerberos-aware
-          // HDFS helpers when the view is configured with KerberoseEnabled=y.
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-
-            pigjobimpl.createDirPigJobSecured(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfsSecured(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-
-          } else {
-
-            pigjobimpl.createDirPigJob(pigJobDirName, view.getProperties().get("namenode_URI_Ambari"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/script.pig", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stderr", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-            pigjobimpl.copyFileBetweenHdfs(pigJobDbPojo.get(i).getDir() + "/stdout", pigJobDirName, view.getProperties().get("namenode_URI_Ambari"), view.getProperties().get("namenode_URI_Hue"));
-
-          }
-
-          logger.info(pigJobDbPojo.get(i).getTitle() + "has been migrated to Ambari");
-
-        }
-        // Commit all inserted rows only after every job migrated successfully.
-        connectionAmbaridb.commit();
-      }
-
-    } catch (SQLException e) {
-      logger.error("sql exception in ambari database:", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("roll back done");
-      } catch (SQLException e1) {
-        logger.error("roll back  exception:",e1);
-      }
-    } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception:",e2);
-    } catch (ParseException e) {
-      logger.error("ParseException: " ,e);
-    } catch (URISyntaxException e) {
-      logger.error("URISyntaxException" ,e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException" ,e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("connection closing exception ", e);
-        }
-    }
-
-    logger.info("------------------------------");
-    logger.info("pig Job Migration End");
-    logger.info("------------------------------");
-
-    //session.setAttribute(ProgressBarStatus.TASK_PROGRESS_VARIABLE, 0);
-
-//    CheckProgresStatus.setProgressPercentage(0);
-//    CheckProgresStatus.setNoOfQueryCompleted(0);
-//    CheckProgresStatus.setTotalNoOfQuery(0);
-//    CheckProgresStatus.setNoOfQueryLeft(0);
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-    // NOTE(review): jobtype is set to "hivehistoryquerymigration" in this
-    // pig-job utility — presumably a copy/paste slip; verify expected value.
-    migrationresult.setJobtype("hivehistoryquerymigration");
-    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-    getResourceManager(view).update(migrationresult, jobid);
-
-
-  }
-
-}
-

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
deleted file mode 100644
index 15f033f..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigjob/PigJobStartJob.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-/**
- * Worker thread that runs one pig-job migration asynchronously. Progress is
- * published through a {@link MigrationResponse} identified by {@code jobid};
- * the work is delegated to {@link PigJobMigrationUtility#pigJobMigration}.
- */
-public class PigJobStartJob extends Thread{
-
-  String username;   // Hue user name recorded on the migration response
-  String instance;   // instance name recorded on the migration response
-  String startdate;  // start of the job search window (may be empty)
-  String enddate;    // end of the job search window (may be empty)
-  String jobid;      // id under which this job's progress is tracked
-  ViewContext view;  // view context handed through to the migration utility
-
-  /** Captures the migration parameters; work starts when the thread is run. */
-  public PigJobStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
-    this.username = username;
-    this.instance=instance;
-    this.startdate=startdate;
-    this.enddate=enddate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-  /**
-   * Thread entry point: seeds a MigrationResponse at 0% progress and invokes
-   * the pig-job migration. Exceptions are only printed to stderr, so a
-   * failure leaves the job silently incomplete.
-   */
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-
-    PigJobMigrationUtility pigjobmigration=new PigJobMigrationUtility();
-    try {
-      pigjobmigration.pigJobMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
-    }
-    catch (IOException e) {
-      e.printStackTrace();  // NOTE(review): swallowed — consider logging and marking the job failed
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
deleted file mode 100644
index c5f073c..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigSavedScriptStartJob.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class PigSavedScriptStartJob extends Thread{
-
-  String username;
-  String instance;
-  String startdate;
-  String enddate;
-  String jobid;
-  ViewContext view;
-
-  public PigSavedScriptStartJob(String username, String instance, String startdate, String enddate, String jobid, ViewContext view) {
-    this.username = username;
-    this.instance=instance;
-    this.startdate=startdate;
-    this.enddate=enddate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setUserNameofhue(username);
-    migrationresult.setProgressPercentage(0);
-
-    PigScriptMigrationUtility pigsavedscript =new PigScriptMigrationUtility();
-    try {
-      pigsavedscript.pigScriptMigration(username,instance,startdate,enddate,view,migrationresult,jobid);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
deleted file mode 100644
index c8aa1c0..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationImplementation.java
+++ /dev/null
@@ -1,504 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
-
-import org.apache.ambari.view.huetoambarimigration.migration.pig.pigjob.PigJobMigrationImplementation;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset.QuerySetAmbariDB;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset.QuerySet;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.log4j.Logger;
-import org.jdom.Attribute;
-import org.jdom.Document;
-import org.jdom.Element;
-import org.jdom.JDOMException;
-import org.jdom.input.SAXBuilder;
-import org.jdom.output.Format;
-import org.jdom.output.XMLOutputter;
-
-import java.io.*;
-import java.security.PrivilegedExceptionAction;
-import java.sql.*;
-import java.text.ParseException;
-import java.text.SimpleDateFormat;
-import java.util.ArrayList;
-import java.util.Calendar;
-import java.util.Date;
-import java.util.GregorianCalendar;
-
-
-public class PigScriptMigrationImplementation {
-
-  static final Logger logger = Logger.getLogger(PigJobMigrationImplementation.class);
-
-  private static String readAll(Reader rd) throws IOException {
-    StringBuilder sb = new StringBuilder();
-    int cp;
-    while ((cp = rd.read()) != -1) {
-      sb.append((char) cp);
-    }
-    return sb.toString();
-  }
-
-  public void wrtitetoalternatesqlfile(String dirname, String content, String instance, int i) throws IOException {
-
-    Date dNow = new Date();
-    SimpleDateFormat ft = new SimpleDateFormat("YYYY-MM-dd hh:mm:ss");
-    String currentDate = ft.format(dNow);
-
-    XMLOutputter xmlOutput = new XMLOutputter();
-
-    xmlOutput.setFormat(Format.getPrettyFormat());
-
-    File xmlfile = new File(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml");
-
-    if (xmlfile.exists()) {
-      String iteration = Integer.toString(i + 1);
-      SAXBuilder builder = new SAXBuilder();
-      Document doc;
-      try {
-        doc = (Document) builder.build(xmlfile);
-
-        Element rootNode = doc.getRootElement();
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        rootNode.addContent(record);
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-
-      } catch (JDOMException e) {
-        logger.error("JDOMException: ", e);
-      }
-
-
-    } else {
-      // create
-      try {
-        String iteration = Integer.toString(i + 1);
-        Element revertrecord = new Element("RevertChangePage");
-        Document doc = new Document(revertrecord);
-        doc.setRootElement(revertrecord);
-
-        Element record = new Element("RevertRecord");
-        record.setAttribute(new Attribute("id", iteration));
-        record.addContent(new Element("datetime").setText(currentDate.toString()));
-        record.addContent(new Element("dirname").setText(dirname));
-        record.addContent(new Element("instance").setText(instance));
-        record.addContent(new Element("query").setText(content));
-
-        doc.getRootElement().addContent(record);
-
-        xmlOutput.output(doc, new FileWriter(ConfigurationCheckImplementation.getHomeDir() + "RevertChangesService.xml"));
-
-      } catch (IOException io) {
-        logger.error("IOException: ", io);
-
-      }
-
-    }
-
-
-  }
-
-  public int fetchInstanceTablenamePigScript(Connection c, String instance, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-    String ds_id = new String();
-    int id = 0;
-    Statement stmt = null;
-    PreparedStatement prSt = null;
-
-
-    ResultSet rs = null;
-
-
-    prSt = ambaridatabase.getTableIdFromInstanceName(c, instance);
-
-    logger.info("sql statement to fetch is from ambari instance:= =  " + prSt);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      id = rs.getInt("id");
-    }
-    return id;
-
-  }
-
-  public int fetchmaxIdforPigSavedScript(Connection c, int id, QuerySetAmbariDB ambaridatabase) throws SQLException {
-
-
-    String ds_id = null;
-    ResultSet rs = null;
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.getMaxDsIdFromTableId(c, id);
-
-    rs = prSt.executeQuery();
-
-    while (rs.next()) {
-      ds_id = rs.getString("max");
-    }
-
-    int num;
-    if (ds_id == null) {
-      num = 1;
-    } else {
-      num = Integer.parseInt(ds_id);
-    }
-    return num;
-  }
-
-  public void insertRowForPigScript(String dirname, int maxcountforpigjob, int maxcount, String time, String time2, long epochtime, String title, Connection c, int id, String instance, int i, QuerySetAmbariDB ambaridatabase) throws SQLException, IOException {
-
-    String maxcount1 = Integer.toString(maxcount);
-    String epochtime1 = Long.toString(epochtime);
-    String revSql = null;
-
-    PreparedStatement prSt = null;
-
-    prSt = ambaridatabase.insertToPigScript(c, id, maxcount1, dirname, title);
-
-    prSt.executeUpdate();
-
-    revSql = ambaridatabase.revertSql(id, maxcount1);
-
-    wrtitetoalternatesqlfile(dirname, revSql, instance, i);
-
-  }
-
-
-  public long getEpochTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-
-    return epoch;
-
-  }
-
-
-  public String getTime() throws ParseException {
-    int day, month, year;
-    int second, minute, hour;
-    int milisecond;
-    GregorianCalendar date = new GregorianCalendar();
-
-    day = date.get(Calendar.DAY_OF_MONTH);
-    month = date.get(Calendar.MONTH);
-    year = date.get(Calendar.YEAR);
-
-    second = date.get(Calendar.SECOND);
-    minute = date.get(Calendar.MINUTE);
-    hour = date.get(Calendar.HOUR);
-    milisecond = date.get(Calendar.MILLISECOND);
-
-    String s = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute;
-    String s1 = year + "-" + (month + 1) + "-" + day + "_" + hour + "-" + minute + "-" + second + "-" + milisecond;
-    SimpleDateFormat df = new SimpleDateFormat("yyyy-MM-dd_HH-mm-ss-SSS");
-    Date date1 = df.parse(s1);
-    long epoch = date1.getTime();
-
-    return s;
-
-  }
-
-
-  public String getTimeInorder() throws ParseException {
-    SimpleDateFormat sdfDate = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss.msssss +00:00:00");//dd/MM/yyyy
-    Date now = new Date();
-    String strDate = sdfDate.format(now);
-    return strDate;
-  }
-
-
-  public ArrayList<PigModel> fetchFromHueDatabase(String username, String startdate, String endtime, Connection connection, QuerySet huedatabase) throws ClassNotFoundException, IOException {
-    int id = 0;
-    int i = 0;
-    ResultSet rs1 = null;
-    String[] query = new String[100];
-    ArrayList<PigModel> pigArrayList = new ArrayList<PigModel>();
-    try {
-      Statement statement = connection.createStatement();
-      connection.setAutoCommit(false);
-      PreparedStatement prSt = null;
-      ResultSet rs;
-      if (username.equals("all")) {
-      } else {
-
-        prSt = huedatabase.getUseridfromUserName(connection, username);
-
-        rs = prSt.executeQuery();
-
-        while (rs.next()) {
-          id = rs.getInt("id");
-        }
-      }
-
-      if (startdate.equals("") && endtime.equals("")) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDateAllUser(connection);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateNoEndDate(connection, id);
-
-        }
-
-      } else if ((startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDateAllUser(connection, endtime);
-        } else {
-          prSt = huedatabase.getQueriesNoStartDateYesEndDate(connection, id, endtime);
-
-        }
-      } else if (!(startdate.equals("")) && (endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDateAllUser(connection, startdate);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateNoEndDate(connection, id, startdate);
-
-        }
-
-      } else if (!(startdate.equals("")) && !(endtime.equals(""))) {
-        if (username.equals("all")) {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDateAllUser(connection, startdate, endtime);
-        } else {
-          prSt = huedatabase.getQueriesYesStartDateYesEndDate(connection, id, startdate, endtime);
-        }
-
-
-      }
-
-      rs1 = prSt.executeQuery();
-
-
-      // rs1 = statement.executeQuery("select pig_script,title,date_created,saved,arguments from pig_pigscript where saved=1 AND user_id ="+id+" AND date_created BETWEEN '"+ startdate +"' AND '"  +endtime +"';");
-      while (rs1.next()) {
-        PigModel pojopig = new PigModel();
-        String script = rs1.getString("pig_script");
-        String title = rs1.getString("title");
-        Date created_data = rs1.getDate("date_created");
-        pojopig.setDt(created_data);
-        pojopig.setScript(script);
-        pojopig.setTitle(title);
-
-        pigArrayList.add(pojopig);
-        i++;
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("SQLException", e);
-    } finally {
-      try {
-        if (connection != null)
-          connection.close();
-      } catch (SQLException e) {
-        logger.error("SQLException", e);
-      }
-    }
-
-    return pigArrayList;
-
-  }
-
-  public void writetPigScripttoLocalFile(String script, String title, Date createddate, String homedir, String filename2) {
-    try {
-      logger.info(homedir + filename2);
-      File file = new File(homedir + filename2);
-
-      if (!file.exists()) {
-        file.createNewFile();
-      }
-
-      FileWriter fw = new FileWriter(file.getAbsoluteFile());
-      BufferedWriter bw = new BufferedWriter(fw);
-      bw.write(script);
-      bw.close();
-
-
-    } catch (IOException e) {
-
-      logger.error("IOException", e);
-    }
-
-  }
-
-  public void deletePigScriptLocalFile(String homedir, String filename2) {
-    try {
-
-      File file = new File(homedir + filename2);
-
-      if (file.delete()) {
-        logger.info("Temproray file deleted");
-      } else {
-        logger.info("Temproray file delete failed");
-      }
-
-    } catch (Exception e) {
-
-      logger.error("File Exception: ", e);
-
-    }
-
-  }
-
-  public void putFileinHdfs(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-      UserGroupInformation ugi = UserGroupInformation
-        .createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          Configuration conf = new Configuration();
-          conf.set("fs.hdfs.impl",
-            org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-          );
-          conf.set("fs.file.impl",
-            org.apache.hadoop.fs.LocalFileSystem.class.getName()
-          );
-          conf.set("fs.defaultFS", namenodeuri);
-          conf.set("hadoop.job.ugi", "hdfs");
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs Exception: ", e);
-    }
-
-  }
-
-  public void putFileinHdfsSecured(final String source, final String dest, final String namenodeuri)
-    throws IOException {
-
-    try {
-      final Configuration conf = new Configuration();
-
-      conf.set("fs.hdfs.impl",
-        org.apache.hadoop.hdfs.DistributedFileSystem.class.getName()
-      );
-      conf.set("fs.file.impl",
-        org.apache.hadoop.fs.LocalFileSystem.class.getName()
-      );
-      conf.set("fs.defaultFS", namenodeuri);
-      conf.set("hadoop.job.ugi", "hdfs");
-      conf.set("hadoop.security.authentication", "Kerberos");
-
-      UserGroupInformation.setConfiguration(conf);
-      UserGroupInformation ugi = UserGroupInformation.createRemoteUser("hdfs");
-
-      ugi.doAs(new PrivilegedExceptionAction<Void>() {
-
-        public Void run() throws Exception {
-
-          FileSystem fileSystem = FileSystem.get(conf);
-
-          String filename = source.substring(
-            source.lastIndexOf('/') + 1, source.length());
-          String dest1;
-          if (dest.charAt(dest.length() - 1) != '/') {
-            dest1 = dest + "/" + filename;
-          } else {
-            dest1 = dest + filename;
-          }
-
-          Path path = new Path(dest1);
-          if (fileSystem.exists(path)) {
-
-          }
-          //	Path pathsource = new Path(source);
-          FSDataOutputStream out = fileSystem.create(path);
-
-          InputStream in = new BufferedInputStream(
-            new FileInputStream(new File(source)));
-
-          byte[] b = new byte[1024];
-          int numBytes = 0;
-          while ((numBytes = in.read(b)) > 0) {
-            out.write(b, 0, numBytes);
-          }
-          in.close();
-          out.close();
-          fileSystem.close();
-          return null;
-        }
-      });
-    } catch (Exception e) {
-      logger.error("Webhdfs Exception: ", e);
-
-    }
-
-  }
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
deleted file mode 100644
index 44e27c1..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/pig/pigscript/PigScriptMigrationUtility.java
+++ /dev/null
@@ -1,229 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-
-package org.apache.ambari.view.huetoambarimigration.migration.pig.pigscript;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceAmbariDatabase;
-import org.apache.ambari.view.huetoambarimigration.datasource.DataSourceHueDatabase;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.PigModel;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.ambariqueryset.pig.savedscriptqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.datasource.queryset.huequeryset.pig.savedscriptqueryset.*;
-import org.apache.ambari.view.huetoambarimigration.resources.PersonalCRUDResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.MigrationResourceManager;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.apache.ambari.view.huetoambarimigration.migration.configuration.ConfigurationCheckImplementation;
-import org.apache.log4j.Logger;
-
-import java.beans.PropertyVetoException;
-import java.io.IOException;
-import java.sql.Connection;
-import java.sql.SQLException;
-import java.text.ParseException;
-import java.util.ArrayList;
-
-public class PigScriptMigrationUtility {
-
-  protected MigrationResourceManager resourceManager = null;
-
-  public synchronized PersonalCRUDResourceManager<MigrationResponse> getResourceManager(ViewContext view) {
-    if (resourceManager == null) {
-      resourceManager = new MigrationResourceManager(view);
-    }
-    return resourceManager;
-  }
-
-
-  public void pigScriptMigration(String username, String instance, String startDate, String endDate, ViewContext view, MigrationResponse migrationresult, String jobid) throws IOException, ItemNotFound {
-
-    long startTime = System.currentTimeMillis();
-
-    final Logger logger = Logger.getLogger(PigScriptMigrationUtility.class);
-    Connection connectionHuedb = null;
-    Connection connectionAmbaridb = null;
-
-    logger.info("-------------------------------------");
-    logger.info("pig saved script Migration started");
-    logger.info("-------------------------------------");
-
-
-    int i = 0;
-
-    logger.info("start date: " + startDate);
-    logger.info("enddate date: " + endDate);
-    logger.info("instance is: " + username);
-    logger.info("hue username is : " + instance);
-
-    //Reading the configuration file
-    PigScriptMigrationImplementation pigsavedscriptmigration = new PigScriptMigrationImplementation();
-
-    QuerySet huedatabase = null;
-
-    if (view.getProperties().get("huedrivername").contains("mysql")) {
-      huedatabase = new MysqlQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("postgresql")) {
-      huedatabase = new PostgressQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("sqlite")) {
-
-      huedatabase = new SqliteQuerySet();
-    } else if (view.getProperties().get("huedrivername").contains("oracle")) {
-      huedatabase = new OracleQuerySet();
-    }
-
-    QuerySetAmbariDB ambaridatabase = null;
-
-
-    if (view.getProperties().get("ambaridrivername").contains("mysql")) {
-      ambaridatabase = new MysqlQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("postgresql")) {
-      ambaridatabase = new PostgressQuerySetAmbariDB();
-    } else if (view.getProperties().get("ambaridrivername").contains("oracle")) {
-      ambaridatabase = new OracleQuerySetAmbariDB();
-    }
-
-    int maxcountforsavequery = 0, maxcountforpigsavedscript;
-    String time = null, timetobeInorder = null;
-    Long epochTime = null;
-    String dirNameForPigScript, completeDirandFilePath, pigscriptFilename = "";
-    int pigInstanceTableName;
-
-    ArrayList<PigModel> dbpojoPigSavedscript = new ArrayList<PigModel>();
-
-    try {
-      connectionHuedb = DataSourceHueDatabase.getInstance(view.getProperties().get("huedrivername"), view.getProperties().get("huejdbcurl"), view.getProperties().get("huedbusername"), view.getProperties().get("huedbpassword")).getConnection();//connection to Hue DB
-      dbpojoPigSavedscript = pigsavedscriptmigration.fetchFromHueDatabase(username, startDate, endDate, connectionHuedb, huedatabase);// Fetching pig script details from Hue DB
-
-      for (int j = 0; j < dbpojoPigSavedscript.size(); j++) {
-        logger.info("the query fetched from hue=" + dbpojoPigSavedscript.get(j).getScript());
-
-      }
-
-
-      /* If No pig Script has been fetched from Hue db according to our search criteria*/
-      if (dbpojoPigSavedscript.size() == 0) {
-
-        migrationresult.setIsNoQuerySelected("yes");
-        migrationresult.setProgressPercentage(0);
-        migrationresult.setNumberOfQueryTransfered(0);
-        migrationresult.setTotalNoQuery(dbpojoPigSavedscript.size());
-        getResourceManager(view).update(migrationresult, jobid);
-
-        logger.info("no pig script has been selected from hue according to your criteria of searching");
-
-
-      } else {
-
-        connectionAmbaridb = DataSourceAmbariDatabase.getInstance(view.getProperties().get("ambaridrivername"), view.getProperties().get("ambarijdbcurl"), view.getProperties().get("ambaridbusername"), view.getProperties().get("ambaridbpassword")).getConnection();// connecting to ambari db
-        connectionAmbaridb.setAutoCommit(false);
-        logger.info("loop will continue for " + dbpojoPigSavedscript.size() + "times");
-
-        //for each pig script found in Hue Database
-
-        for (i = 0; i < dbpojoPigSavedscript.size(); i++) {
-
-
-          float calc = ((float) (i + 1)) / dbpojoPigSavedscript.size() * 100;
-          int progressPercentage = Math.round(calc);
-          migrationresult.setIsNoQuerySelected("no");
-          migrationresult.setProgressPercentage(progressPercentage);
-          migrationresult.setNumberOfQueryTransfered(i + 1);
-          migrationresult.setTotalNoQuery(dbpojoPigSavedscript.size());
-          getResourceManager(view).update(migrationresult, jobid);
-
-          logger.info("Loop No." + (i + 1));
-          logger.info("________________");
-          logger.info("the title of script:  " + dbpojoPigSavedscript.get(i).getTitle());
-
-          pigInstanceTableName = pigsavedscriptmigration.fetchInstanceTablenamePigScript(connectionAmbaridb, instance, ambaridatabase);// finding the table name in ambari from the given instance
-
-          maxcountforpigsavedscript = (pigsavedscriptmigration.fetchmaxIdforPigSavedScript(connectionAmbaridb, pigInstanceTableName, ambaridatabase) + 1);// maximum count of the primary key of pig Script table
-
-          time = pigsavedscriptmigration.getTime();
-
-          timetobeInorder = pigsavedscriptmigration.getTimeInorder();
-
-          epochTime = pigsavedscriptmigration.getEpochTime();
-
-          dirNameForPigScript = "/user/admin/pig/scripts/";
-
-          pigscriptFilename = dbpojoPigSavedscript.get(i).getTitle() + "-" + time + ".pig";
-
-          completeDirandFilePath = dirNameForPigScript + pigscriptFilename;
-
-          pigsavedscriptmigration.writetPigScripttoLocalFile(dbpojoPigSavedscript.get(i).getScript(), dbpojoPigSavedscript.get(i).getTitle(), dbpojoPigSavedscript.get(i).getDt(), ConfigurationCheckImplementation.getHomeDir(), pigscriptFilename);
-
-          pigsavedscriptmigration.insertRowForPigScript(completeDirandFilePath, maxcountforsavequery, maxcountforpigsavedscript, time, timetobeInorder, epochTime, dbpojoPigSavedscript.get(i).getTitle(), connectionAmbaridb, pigInstanceTableName, instance, i, ambaridatabase);
-
-          if (view.getProperties().get("KerberoseEnabled").equals("y")) {
-            pigsavedscriptmigration.putFileinHdfsSecured(ConfigurationCheckImplementation.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          } else {
-            pigsavedscriptmigration.putFileinHdfs(ConfigurationCheckImplementation.getHomeDir() + pigscriptFilename, dirNameForPigScript, view.getProperties().get("namenode_URI_Ambari"));
-          }
-
-          logger.info(dbpojoPigSavedscript.get(i).getTitle() + "Migrated to Ambari");
-
-          pigsavedscriptmigration.deletePigScriptLocalFile(ConfigurationCheckImplementation.getHomeDir(), pigscriptFilename);
-
-        }
-        connectionAmbaridb.commit();
-
-      }
-
-
-    } catch (SQLException e) {
-      logger.error("Sql exception in ambari database", e);
-      try {
-        connectionAmbaridb.rollback();
-        logger.info("rollback done");
-      } catch (SQLException e1) {
-        logger.error("Sql exception while doing roll back", e);
-      }
-    } catch (ClassNotFoundException e2) {
-      logger.error("class not found exception", e2);
-    } catch (ParseException e) {
-      logger.error("ParseException: ", e);
-    } catch (PropertyVetoException e) {
-      logger.error("PropertyVetoException: ", e);
-    } finally {
-      if (null != connectionAmbaridb)
-        try {
-          connectionAmbaridb.close();
-        } catch (SQLException e) {
-          logger.error("connection close exception: ", e);
-        }
-    }
-
-    long stopTime = System.currentTimeMillis();
-    long elapsedTime = stopTime - startTime;
-
-
-    migrationresult.setJobtype("hivehistoryquerymigration");
-    migrationresult.setTotalTimeTaken(String.valueOf(elapsedTime));
-    getResourceManager(view).update(migrationresult, jobid);
-
-
-    logger.info("----------------------------------");
-    logger.info("pig saved script Migration ends");
-    logger.info("----------------------------------");
-
-  }
-
-
-}

http://git-wip-us.apache.org/repos/asf/ambari/blob/283256c8/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java
----------------------------------------------------------------------
diff --git a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java b/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java
deleted file mode 100644
index 31cec2f..0000000
--- a/contrib/views/hueambarimigration/src/main/java/org/apache/ambari/view/huetoambarimigration/migration/revertchange/RevertChangeStartJob.java
+++ /dev/null
@@ -1,70 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- * <p/>
- * http://www.apache.org/licenses/LICENSE-2.0
- * <p/>
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.ambari.view.huetoambarimigration.migration.revertchange;
-
-import org.apache.ambari.view.ViewContext;
-import org.apache.ambari.view.huetoambarimigration.persistence.utils.ItemNotFound;
-import org.apache.ambari.view.huetoambarimigration.resources.scripts.models.MigrationResponse;
-import org.json.simple.JSONObject;
-
-import java.io.IOException;
-
-
-public class RevertChangeStartJob extends Thread{
-
-
-  String instance;
-  String revertdate;
-  String jobid;
-  ViewContext view;
-
-  public RevertChangeStartJob(String instance, String revertdate, String jobid, ViewContext view) {
-
-    this.instance=instance;
-    this.revertdate=revertdate;
-    this.jobid=jobid;
-    this.view=view;
-  }
-
-
-
-  @Override
-  public void run() {
-
-    MigrationResponse migrationresult=new MigrationResponse();
-
-    migrationresult.setId(jobid);
-    migrationresult.setIntanceName(instance);
-    migrationresult.setProgressPercentage(0);
-
-    JSONObject response = new JSONObject();
-
-
-    RevertChangeUtility revertchange = new RevertChangeUtility();
-    try {
-      revertchange.revertChangeUtility(instance,revertdate,jobid,view,migrationresult);
-    }
-    catch (IOException e) {
-      e.printStackTrace();
-    } catch (ItemNotFound itemNotFound) {
-      itemNotFound.printStackTrace();
-    }
-
-  }
-
-}