Posted to commits@airavata.apache.org by sm...@apache.org on 2015/03/24 10:19:12 UTC

[1/8] airavata git commit: Adding python and re-arranging the script - AIRAVATA-1641

Repository: airavata
Updated Branches:
  refs/heads/master aa27ce109 -> 97c7a7362


Adding python and re-arranging the script - AIRAVATA-1641


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/c090519a
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/c090519a
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/c090519a

Branch: refs/heads/master
Commit: c090519ae2198c1d2624760c8200fb5ebf36b6be
Parents: aa27ce1
Author: Suresh Marru <sm...@apache.org>
Authored: Tue Mar 24 05:05:22 2015 -0400
Committer: Suresh Marru <sm...@apache.org>
Committed: Tue Mar 24 05:05:22 2015 -0400

----------------------------------------------------------------------
 airavata-api/generate-thrift-files.sh | 256 +++++++++++++++++++----------
 1 file changed, 173 insertions(+), 83 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/c090519a/airavata-api/generate-thrift-files.sh
----------------------------------------------------------------------
diff --git a/airavata-api/generate-thrift-files.sh b/airavata-api/generate-thrift-files.sh
index c8a000d..88d38ea 100755
--- a/airavata-api/generate-thrift-files.sh
+++ b/airavata-api/generate-thrift-files.sh
@@ -15,19 +15,62 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# This script will regenerate the thrift code for Airavata Server Skeletons, Client Stubs and Data Model java beans.
-#
-# Credit: This script was created referring to Apache Accumulo project and tuned to Airavata Needs.
+# This script will generate/regenerate the thrift code for the Airavata Server Skeletons, Client Stubs
+#    and Data Model beans in Java, C++, PHP and Python.
+
+show_usage() {
+	echo -e "Usage: $0 [language to generate stubs]"
+	echo ""
+	echo "options:"
+	echo -e "\tjava Generate/Update Java Stubs"
+	echo -e "\tphp Generate/Update PHP Stubs"
+	echo -e "\tcpp Generate/Update C++ Stubs"
+	echo -e "\tpython Generate/Update Python Stubs."
+	echo -e "\tall Generate/Update all stubs (Java, PHP, C++, Python)."
+	echo -e "\t-h[elp] Print the usage options of this script"
+}
+
+if [ $# -lt 1 ]
+then
+	show_usage
+	exit 1
+fi
+
+if [[ $1 == "-h" || $1 == "--help" ]]
+then
+	show_usage
+	exit 0
+fi
+
+# Generating the thrift files requires Apache Thrift. Please add thrift to your path.
+#  Verify that thrift is installed, is in the path and is at the required version.
 
-# Global Constants used across the script
 REQUIRED_THRIFT_VERSION='0.9.1'
+THRIFT_EXEC=/usr/local/Cellar/thrift/0.9.1/bin/thrift
+
+VERSION=$($THRIFT_EXEC -version 2>/dev/null | grep -F "${REQUIRED_THRIFT_VERSION}" |  wc -l)
+if [ "$VERSION" -ne 1 ] ; then
+    echo -e "ERROR:\t Apache Thrift version ${REQUIRED_THRIFT_VERSION} is required."
+    echo -e "It is either not installed or is not in the path"
+    exit 1
+fi
+
+# Global Constants used across the script
 THRIFT_IDL_DIR='thrift-interface-descriptions'
 BASE_TARGET_DIR='target'
 DATAMODEL_SRC_DIR='airavata-data-models/src/main/java'
 JAVA_API_SDK_DIR='airavata-api-stubs/src/main/java'
-CPP_SDK_DIR='airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/'
 PHP_SDK_DIR='airavata-client-sdks/airavata-php-sdk/src/main/resources/lib'
-THRIFT_EXEC=/usr/local/Cellar/thrift/0.9.1/bin/thrift
+CPP_SDK_DIR='airavata-client-sdks/airavata-cpp-sdk/src/main/resources/lib/airavata/'
+PYTHON_SDK_DIR='airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/'
+
+# Initialize the thrift arguments.
+#  Since most of the Airavata API and Data Models have includes, use the recursive option by default.
+#  Generate all the files in the target directory.
+THRIFT_ARGS="-r -o ${BASE_TARGET_DIR}"
+# Ensure the required target directories exist; create them if they do not.
+mkdir -p ${BASE_TARGET_DIR}
+
 # The Function fail prints error messages on failure and quits the script.
 fail() {
     echo $@
@@ -94,116 +137,164 @@ copy_changed_files() {
     rsync -auv ${GENERATED_CODE_DIR}/ ${WORKSPACE_SRC_DIR}
 }
 
-# Generation of thrift files will require installing Apache Thrift. Please add thrift to your path.
-#  Verify is thrift is installed, is in the path is at a specified version.
-VERSION=$($THRIFT_EXEC -version 2>/dev/null | grep -F "${REQUIRED_THRIFT_VERSION}" |  wc -l)
-if [ "$VERSION" -ne 1 ] ; then
-    echo "****************************************************"
-    echo "*** thrift is not installed or is not in the path"
-    echo "***   expecting 'thrift -version' to return ${REQUIRED_THRIFT_VERSION}"
-    echo "*** generated code will not be updated"
-    fail "****************************************************"
-fi
-
-# Initialize the thrift arguments.
-#  Since most of the Airavata API and Data Models have includes, use recursive option by default.
-#  Generate all the files in target directory
-THRIFT_ARGS="-r -o ${BASE_TARGET_DIR}"
-# Ensure the required target directories exists, if not create.
-mkdir -p ${BASE_TARGET_DIR}
-
 #######################################
 # Generate/Update Airavata Data Model #
 #######################################
 
-#Java Beans generation directory
-JAVA_BEAN_GEN_DIR=${BASE_TARGET_DIR}/gen-javabean
+generate_java_stubs() {
 
-# As a precaution  remove and previously generated files if exists
-rm -rf ${JAVA_BEAN_GEN_DIR}
+    #Java Beans generation directory
+    JAVA_BEAN_GEN_DIR=${BASE_TARGET_DIR}/gen-javabean
 
-# Generate the Airavata Data Model using thrift Java Beans generator. This will take generate the classes in bean style
-#   with members being private and setters returning voids.
-#   The airavataDataModel.thrift includes rest of data models.
-$THRIFT_EXEC ${THRIFT_ARGS} --gen java:beans ${THRIFT_IDL_DIR}/airavataDataModel.thrift || fail unable to generate java bean thrift classes on base data model
+    # As a precaution, remove any previously generated files if they exist
+    rm -rf ${JAVA_BEAN_GEN_DIR}
 
-$THRIFT_EXEC ${THRIFT_ARGS} --gen java:beans ${THRIFT_IDL_DIR}/appCatalogModels.thrift || fail unable to generate java bean thrift classes on app catalog data models
+    # Generate the Airavata Data Model using the thrift Java Beans generator. This will generate the classes in bean style,
+    #   with members being private and setters returning void.
+    #   The airavataDataModel.thrift includes the rest of the data models.
+    $THRIFT_EXEC ${THRIFT_ARGS} --gen java:beans ${THRIFT_IDL_DIR}/airavataDataModel.thrift || fail unable to generate java bean thrift classes on base data model
 
-$THRIFT_EXEC ${THRIFT_ARGS} --gen java:beans ${THRIFT_IDL_DIR}/workflowDataModel.thrift || fail unable to generate java bean thrift classes on app workflow data models
+    $THRIFT_EXEC ${THRIFT_ARGS} --gen java:beans ${THRIFT_IDL_DIR}/appCatalogModels.thrift || fail unable to generate java bean thrift classes on app catalog data models
 
-# For the generated java beans add the ASF V2 License header
-add_license_header $JAVA_BEAN_GEN_DIR
+    $THRIFT_EXEC ${THRIFT_ARGS} --gen java:beans ${THRIFT_IDL_DIR}/workflowDataModel.thrift || fail unable to generate java bean thrift classes on app workflow data models
 
-# Compare the newly generated beans with existing sources and replace the changed ones.
-copy_changed_files ${JAVA_BEAN_GEN_DIR} ${DATAMODEL_SRC_DIR}
+    # For the generated java beans add the ASF V2 License header
+    add_license_header $JAVA_BEAN_GEN_DIR
 
-###############################################################################
-# Generate/Update source used by Airavata Server Skeletons & Java Client Stubs #
-#  JAVA server and client both use generated api-boilerplate-code             #
-###############################################################################
+    # Compare the newly generated beans with existing sources and replace the changed ones.
+    copy_changed_files ${JAVA_BEAN_GEN_DIR} ${DATAMODEL_SRC_DIR}
 
-#Java generation directory
-JAVA_GEN_DIR=${BASE_TARGET_DIR}/gen-java
+    ###############################################################################
+    # Generate/Update source used by Airavata Server Skeletons & Java Client Stubs #
+    #  JAVA server and client both use generated api-boilerplate-code             #
+    ###############################################################################
 
-# As a precaution  remove and previously generated files if exists
-rm -rf ${JAVA_GEN_DIR}
+    #Java generation directory
+    JAVA_GEN_DIR=${BASE_TARGET_DIR}/gen-java
 
-# Using thrift Java generator, generate the java classes based on Airavata API. This
-#   The airavataAPI.thrift includes rest of data models.
-$THRIFT_EXEC ${THRIFT_ARGS} --gen java ${THRIFT_IDL_DIR}/airavataAPI.thrift || fail unable to generate java thrift classes on AiravataAPI
+    # As a precaution, remove any previously generated files if they exist
+    rm -rf ${JAVA_GEN_DIR}
 
-#$THRIFT_EXEC ${THRIFT_ARGS} --gen java ${THRIFT_IDL_DIR}/workflowAPI.thrift || fail unable to generate java thrift classes on WorkflowAPI
+    # Using the thrift Java generator, generate the java classes based on the Airavata API.
+    #   The airavataAPI.thrift includes the rest of the data models.
+    $THRIFT_EXEC ${THRIFT_ARGS} --gen java ${THRIFT_IDL_DIR}/airavataAPI.thrift || fail unable to generate java thrift classes on AiravataAPI
 
-# For the generated java classes add the ASF V2 License header
-add_license_header $JAVA_GEN_DIR
+    #$THRIFT_EXEC ${THRIFT_ARGS} --gen java ${THRIFT_IDL_DIR}/workflowAPI.thrift || fail unable to generate java thrift classes on WorkflowAPI
 
-# Compare the newly generated classes with existing java generated skeleton/stub sources and replace the changed ones.
-#  Only copying the API related classes and avoiding copy of any data models which already exist in the data-models.
-copy_changed_files ${JAVA_GEN_DIR}/org/apache/airavata/api ${JAVA_API_SDK_DIR}/org/apache/airavata/api
+    # For the generated java classes add the ASF V2 License header
+    add_license_header $JAVA_GEN_DIR
+
+    # Compare the newly generated classes with existing java generated skeleton/stub sources and replace the changed ones.
+    #  Only copying the API related classes and avoiding copy of any data models which already exist in the data-models.
+    copy_changed_files ${JAVA_GEN_DIR}/org/apache/airavata/api ${JAVA_API_SDK_DIR}/org/apache/airavata/api
+
+    echo "Successfully generated new java sources, compared against exiting code and replaced the changed files"
+}
+
+####################################
+# Generate/Update PHP Stubs #
+####################################
+
+generate_php_stubs() {
+
+    #PHP generation directory
+    PHP_GEN_DIR=${BASE_TARGET_DIR}/gen-php
+
+    # As a precaution, remove any previously generated files if they exist
+    rm -rf ${PHP_GEN_DIR}
+
+    # Using the thrift PHP generator, generate the PHP classes based on the Airavata API.
+    #   The airavataAPI.thrift includes the rest of the data models.
+    $THRIFT_EXEC ${THRIFT_ARGS} --gen php:autoload ${THRIFT_IDL_DIR}/airavataAPI.thrift || fail unable to generate PHP thrift classes
+
+    #$THRIFT_EXEC ${THRIFT_ARGS} --gen php:autoload ${THRIFT_IDL_DIR}/workflowAPI.thrift || fail unable to generate PHP thrift classes for WorkflowAPI
+    # For the generated PHP classes add the ASF V2 License header
+    ## TODO Write PHP license parser
+
+    # Compare the newly generated classes with the existing generated skeleton/stub sources and replace the changed ones.
+    #  Only copying the API related classes and avoiding copy of any data models which already exist in the data-models.
+    copy_changed_files ${PHP_GEN_DIR} ${PHP_SDK_DIR}
+
+}
 
 ####################################
 # Generate/Update C++ Client Stubs #
 ####################################
 
-#CPP generation directory
-CPP_GEN_DIR=${BASE_TARGET_DIR}/gen-cpp
+generate_cpp_stubs() {
+
+    #CPP generation directory
+    CPP_GEN_DIR=${BASE_TARGET_DIR}/gen-cpp
 
-# As a precaution  remove and previously generated files if exists
-rm -rf ${CPP_GEN_DIR}
+    # As a precaution, remove any previously generated files if they exist
+    rm -rf ${CPP_GEN_DIR}
 
-# Using thrift Java generator, generate the java classes based on Airavata API. This
-#   The airavataAPI.thrift includes rest of data models.
-/usr/local/Cellar/thrift/0.9.1/bin/thrift ${THRIFT_ARGS} --gen cpp ${THRIFT_IDL_DIR}/airavataAPI.thrift || fail unable to generate C++ thrift classes
+    # Using the thrift C++ generator, generate the C++ classes based on the Airavata API.
+    #   The airavataAPI.thrift includes the rest of the data models.
+    $THRIFT_EXEC ${THRIFT_ARGS} --gen cpp ${THRIFT_IDL_DIR}/airavataAPI.thrift || fail unable to generate C++ thrift classes
 
-#$THRIFT_EXEC ${THRIFT_ARGS} --gen cpp ${THRIFT_IDL_DIR}/workflowAPI.thrift || fail unable to generate C++ thrift classes for WorkflowAPI
-# For the generated CPP classes add the ASF V2 License header
-add_license_header $CPP_GEN_DIR
+    #$THRIFT_EXEC ${THRIFT_ARGS} --gen cpp ${THRIFT_IDL_DIR}/workflowAPI.thrift || fail unable to generate C++ thrift classes for WorkflowAPI
+    # For the generated CPP classes add the ASF V2 License header
+    add_license_header $CPP_GEN_DIR
 
-# Compare the newly generated classes with existing java generated skeleton/stub sources and replace the changed ones.
-#  Only copying the API related classes and avoiding copy of any data models which already exist in the data-models.
-copy_changed_files ${CPP_GEN_DIR} ${CPP_SDK_DIR}
+    # Compare the newly generated classes with the existing generated skeleton/stub sources and replace the changed ones.
+    #  Only copying the API related classes and avoiding copy of any data models which already exist in the data-models.
+    copy_changed_files ${CPP_GEN_DIR} ${CPP_SDK_DIR}
+
+}
 
 ####################################
-# Generate/Update PHP Stubs #
+# Generate/Update Python Client Stubs #
 ####################################
 
-#PHP generation directory
-PHP_GEN_DIR=${BASE_TARGET_DIR}/gen-php
+generate_python_stubs() {
 
-# As a precaution  remove and previously generated files if exists
-rm -rf ${PHP_GEN_DIR}
+    #Python generation directory
+    PYTHON_GEN_DIR=${BASE_TARGET_DIR}/gen-py
 
-# Using thrift Java generator, generate the java classes based on Airavata API. This
-#   The airavataAPI.thrift includes rest of data models.
-$THRIFT_EXEC ${THRIFT_ARGS} --gen php:autoload ${THRIFT_IDL_DIR}/airavataAPI.thrift || fail unable to generate PHP thrift classes
+    # As a precaution, remove any previously generated files if they exist
+    rm -rf ${PYTHON_GEN_DIR}
 
-#$THRIFT_EXEC ${THRIFT_ARGS} --gen php:autoload ${THRIFT_IDL_DIR}/workflowAPI.thrift || fail unable to generate PHP thrift classes for WorkflowAPI
-# For the generated java classes add the ASF V2 License header
-## TODO Write PHP license parser
+    # Using the thrift Python generator, generate the Python classes based on the Airavata API.
+    #   The airavataAPI.thrift includes the rest of the data models.
+    $THRIFT_EXEC ${THRIFT_ARGS} --gen py ${THRIFT_IDL_DIR}/airavataAPI.thrift || fail unable to generate Python thrift classes
+
+    # For the generated Python classes add the ASF V2 License header
+    #add_license_header $PYTHON_GEN_DIR
+
+    # Compare the newly generated classes with the existing generated skeleton/stub sources and replace the changed ones.
+    #  Only copying the API related classes and avoiding copy of any data models which already exist in the data-models.
+    copy_changed_files ${PYTHON_GEN_DIR} ${PYTHON_SDK_DIR}
+
+}
 
-# Compare the newly generated classes with existing java generated skeleton/stub sources and replace the changed ones.
-#  Only copying the API related classes and avoiding copy of any data models which already exist in the data-models.
-copy_changed_files ${PHP_GEN_DIR} ${PHP_SDK_DIR}
+for arg in "$@"
+do
+    case "$arg" in
+    all)    echo "Generating all stubs (Java, PHP, C++, Python)"
+            generate_java_stubs
+            generate_php_stubs
+            generate_cpp_stubs
+            generate_python_stubs
+            ;;
+    java)   echo "Generating Java Stubs"
+            generate_java_stubs
+            ;;
+    php)    echo "Generating PHP Stubs"
+            generate_php_stubs
+            ;;
+    cpp)    echo "Generating C++ Stubs"
+            generate_cpp_stubs
+            ;;
+    python) echo "Generating Python Stubs"
+            generate_python_stubs
+            ;;
+    *)      echo "Invalid or unsupported option"
+            show_usage
+            exit 1
+            ;;
+    esac
+done
 
 ####################
 # Cleanup and Exit #
@@ -211,5 +302,4 @@ copy_changed_files ${PHP_GEN_DIR} ${PHP_SDK_DIR}
 # CleanUp: Delete the base target build directory
 #rm -rf ${BASE_TARGET_DIR}
 
-echo "Successfully generated new sources, compared against exiting code and replaced the changed files"
 exit 0
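
For reference, a minimal sketch of how the restructured script above is invoked once this commit
is applied (run from the airavata-api directory; THRIFT_EXEC is hard-coded to the Homebrew
location /usr/local/Cellar/thrift/0.9.1/bin/thrift, so adjust it for other installations):

    ./generate-thrift-files.sh java          # regenerate only the Java beans and API stubs
    ./generate-thrift-files.sh php python    # multiple languages may be passed in one call
    ./generate-thrift-files.sh all           # Java, PHP, C++ and Python in one pass

    # each option boils down to one or more commands of the form:
    #   thrift -r -o target --gen <generator> thrift-interface-descriptions/<idl>.thrift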


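Before the generated code itself, a minimal illustration of how the Python data models added in the
following commit are typically used with the Thrift 0.9.1 Python runtime. This sketch is not part of
the commit; it assumes the airavata-python-sdk lib directory (the apache.airavata... package) and the
thrift library are on PYTHONPATH, and the timestamp is a made-up value.

    # round-trip an ExperimentStatus through the Thrift binary protocol
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    from apache.airavata.model.workspace.experiment.ttypes import ExperimentStatus, ExperimentState

    status = ExperimentStatus(experimentState=ExperimentState.LAUNCHED,
                              timeOfStateChange=1427188522000)  # made-up epoch-millis timestamp
    status.validate()   # raises TProtocolException if the required experimentState is unset

    # serialize to bytes
    write_buf = TTransport.TMemoryBuffer()
    status.write(TBinaryProtocol.TBinaryProtocol(write_buf))
    payload = write_buf.getvalue()

    # deserialize and map the enum value back to its name
    decoded = ExperimentStatus()
    decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(payload)))
    print(ExperimentState._VALUES_TO_NAMES[decoded.experimentState])   # LAUNCHED
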
[3/8] airavata git commit: Adding python generated code - AIRAVATA-1642

Posted by sm...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/ttypes.py
new file mode 100644
index 0000000..794722e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/ttypes.py
@@ -0,0 +1,3042 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.model.appcatalog.computeresource.ttypes
+import apache.airavata.model.appcatalog.appinterface.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+class ExperimentState:
+  CREATED = 0
+  VALIDATED = 1
+  SCHEDULED = 2
+  LAUNCHED = 3
+  EXECUTING = 4
+  CANCELING = 5
+  CANCELED = 6
+  SUSPENDED = 7
+  COMPLETED = 8
+  FAILED = 9
+  UNKNOWN = 10
+
+  _VALUES_TO_NAMES = {
+    0: "CREATED",
+    1: "VALIDATED",
+    2: "SCHEDULED",
+    3: "LAUNCHED",
+    4: "EXECUTING",
+    5: "CANCELING",
+    6: "CANCELED",
+    7: "SUSPENDED",
+    8: "COMPLETED",
+    9: "FAILED",
+    10: "UNKNOWN",
+  }
+
+  _NAMES_TO_VALUES = {
+    "CREATED": 0,
+    "VALIDATED": 1,
+    "SCHEDULED": 2,
+    "LAUNCHED": 3,
+    "EXECUTING": 4,
+    "CANCELING": 5,
+    "CANCELED": 6,
+    "SUSPENDED": 7,
+    "COMPLETED": 8,
+    "FAILED": 9,
+    "UNKNOWN": 10,
+  }
+
+class WorkflowNodeState:
+  INVOKED = 0
+  EXECUTING = 1
+  CANCELING = 2
+  CANCELED = 3
+  SUSPENDED = 4
+  COMPLETED = 5
+  FAILED = 6
+  UNKNOWN = 7
+
+  _VALUES_TO_NAMES = {
+    0: "INVOKED",
+    1: "EXECUTING",
+    2: "CANCELING",
+    3: "CANCELED",
+    4: "SUSPENDED",
+    5: "COMPLETED",
+    6: "FAILED",
+    7: "UNKNOWN",
+  }
+
+  _NAMES_TO_VALUES = {
+    "INVOKED": 0,
+    "EXECUTING": 1,
+    "CANCELING": 2,
+    "CANCELED": 3,
+    "SUSPENDED": 4,
+    "COMPLETED": 5,
+    "FAILED": 6,
+    "UNKNOWN": 7,
+  }
+
+class TaskState:
+  WAITING = 0
+  STARTED = 1
+  PRE_PROCESSING = 2
+  CONFIGURING_WORKSPACE = 3
+  INPUT_DATA_STAGING = 4
+  OUTPUT_DATA_STAGING = 5
+  POST_PROCESSING = 6
+  EXECUTING = 7
+  CANCELING = 8
+  CANCELED = 9
+  COMPLETED = 10
+  FAILED = 11
+  UNKNOWN = 12
+
+  _VALUES_TO_NAMES = {
+    0: "WAITING",
+    1: "STARTED",
+    2: "PRE_PROCESSING",
+    3: "CONFIGURING_WORKSPACE",
+    4: "INPUT_DATA_STAGING",
+    5: "OUTPUT_DATA_STAGING",
+    6: "POST_PROCESSING",
+    7: "EXECUTING",
+    8: "CANCELING",
+    9: "CANCELED",
+    10: "COMPLETED",
+    11: "FAILED",
+    12: "UNKNOWN",
+  }
+
+  _NAMES_TO_VALUES = {
+    "WAITING": 0,
+    "STARTED": 1,
+    "PRE_PROCESSING": 2,
+    "CONFIGURING_WORKSPACE": 3,
+    "INPUT_DATA_STAGING": 4,
+    "OUTPUT_DATA_STAGING": 5,
+    "POST_PROCESSING": 6,
+    "EXECUTING": 7,
+    "CANCELING": 8,
+    "CANCELED": 9,
+    "COMPLETED": 10,
+    "FAILED": 11,
+    "UNKNOWN": 12,
+  }
+
+class JobState:
+  SUBMITTED = 0
+  UN_SUBMITTED = 1
+  SETUP = 2
+  QUEUED = 3
+  ACTIVE = 4
+  COMPLETE = 5
+  CANCELING = 6
+  CANCELED = 7
+  FAILED = 8
+  HELD = 9
+  SUSPENDED = 10
+  UNKNOWN = 11
+
+  _VALUES_TO_NAMES = {
+    0: "SUBMITTED",
+    1: "UN_SUBMITTED",
+    2: "SETUP",
+    3: "QUEUED",
+    4: "ACTIVE",
+    5: "COMPLETE",
+    6: "CANCELING",
+    7: "CANCELED",
+    8: "FAILED",
+    9: "HELD",
+    10: "SUSPENDED",
+    11: "UNKNOWN",
+  }
+
+  _NAMES_TO_VALUES = {
+    "SUBMITTED": 0,
+    "UN_SUBMITTED": 1,
+    "SETUP": 2,
+    "QUEUED": 3,
+    "ACTIVE": 4,
+    "COMPLETE": 5,
+    "CANCELING": 6,
+    "CANCELED": 7,
+    "FAILED": 8,
+    "HELD": 9,
+    "SUSPENDED": 10,
+    "UNKNOWN": 11,
+  }
+
+class TransferState:
+  DIRECTORY_SETUP = 0
+  UPLOAD = 1
+  DOWNLOAD = 2
+  ACTIVE = 3
+  COMPLETE = 4
+  STDOUT_DOWNLOAD = 5
+  STDERROR_DOWNLOAD = 6
+  CANCELING = 7
+  CANCELED = 8
+  FAILED = 9
+  HELD = 10
+  SUSPENDED = 11
+  UNKNOWN = 12
+
+  _VALUES_TO_NAMES = {
+    0: "DIRECTORY_SETUP",
+    1: "UPLOAD",
+    2: "DOWNLOAD",
+    3: "ACTIVE",
+    4: "COMPLETE",
+    5: "STDOUT_DOWNLOAD",
+    6: "STDERROR_DOWNLOAD",
+    7: "CANCELING",
+    8: "CANCELED",
+    9: "FAILED",
+    10: "HELD",
+    11: "SUSPENDED",
+    12: "UNKNOWN",
+  }
+
+  _NAMES_TO_VALUES = {
+    "DIRECTORY_SETUP": 0,
+    "UPLOAD": 1,
+    "DOWNLOAD": 2,
+    "ACTIVE": 3,
+    "COMPLETE": 4,
+    "STDOUT_DOWNLOAD": 5,
+    "STDERROR_DOWNLOAD": 6,
+    "CANCELING": 7,
+    "CANCELED": 8,
+    "FAILED": 9,
+    "HELD": 10,
+    "SUSPENDED": 11,
+    "UNKNOWN": 12,
+  }
+
+class ActionableGroup:
+  RESOURCE_ADMINS = 0
+  AIRAVATA_ADMINS = 1
+  GATEWAYS_ADMINS = 2
+  USER = 3
+  CANNOT_BE_DETERMINED = 4
+
+  _VALUES_TO_NAMES = {
+    0: "RESOURCE_ADMINS",
+    1: "AIRAVATA_ADMINS",
+    2: "GATEWAYS_ADMINS",
+    3: "USER",
+    4: "CANNOT_BE_DETERMINED",
+  }
+
+  _NAMES_TO_VALUES = {
+    "RESOURCE_ADMINS": 0,
+    "AIRAVATA_ADMINS": 1,
+    "GATEWAYS_ADMINS": 2,
+    "USER": 3,
+    "CANNOT_BE_DETERMINED": 4,
+  }
+
+class ErrorCategory:
+  FILE_SYSTEM_FAILURE = 0
+  APPLICATION_FAILURE = 1
+  RESOURCE_NODE_FAILURE = 2
+  DISK_FULL = 3
+  INSUFFICIENT_ALLOCATION = 4
+  SYSTEM_MAINTENANCE = 5
+  AIRAVATA_INTERNAL_ERROR = 6
+  CANNOT_BE_DETERMINED = 7
+
+  _VALUES_TO_NAMES = {
+    0: "FILE_SYSTEM_FAILURE",
+    1: "APPLICATION_FAILURE",
+    2: "RESOURCE_NODE_FAILURE",
+    3: "DISK_FULL",
+    4: "INSUFFICIENT_ALLOCATION",
+    5: "SYSTEM_MAINTENANCE",
+    6: "AIRAVATA_INTERNAL_ERROR",
+    7: "CANNOT_BE_DETERMINED",
+  }
+
+  _NAMES_TO_VALUES = {
+    "FILE_SYSTEM_FAILURE": 0,
+    "APPLICATION_FAILURE": 1,
+    "RESOURCE_NODE_FAILURE": 2,
+    "DISK_FULL": 3,
+    "INSUFFICIENT_ALLOCATION": 4,
+    "SYSTEM_MAINTENANCE": 5,
+    "AIRAVATA_INTERNAL_ERROR": 6,
+    "CANNOT_BE_DETERMINED": 7,
+  }
+
+class CorrectiveAction:
+  RETRY_SUBMISSION = 0
+  CONTACT_SUPPORT = 1
+  CANNOT_BE_DETERMINED = 2
+
+  _VALUES_TO_NAMES = {
+    0: "RETRY_SUBMISSION",
+    1: "CONTACT_SUPPORT",
+    2: "CANNOT_BE_DETERMINED",
+  }
+
+  _NAMES_TO_VALUES = {
+    "RETRY_SUBMISSION": 0,
+    "CONTACT_SUPPORT": 1,
+    "CANNOT_BE_DETERMINED": 2,
+  }
+
+class ExecutionUnit:
+  INPUT = 0
+  APPLICATION = 1
+  OUTPUT = 2
+  OTHER = 3
+
+  _VALUES_TO_NAMES = {
+    0: "INPUT",
+    1: "APPLICATION",
+    2: "OUTPUT",
+    3: "OTHER",
+  }
+
+  _NAMES_TO_VALUES = {
+    "INPUT": 0,
+    "APPLICATION": 1,
+    "OUTPUT": 2,
+    "OTHER": 3,
+  }
+
+
+class ExperimentStatus:
+  """
+  Attributes:
+   - experimentState
+   - timeOfStateChange
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'experimentState', None, None, ), # 1
+    (2, TType.I64, 'timeOfStateChange', None, None, ), # 2
+  )
+
+  def __init__(self, experimentState=None, timeOfStateChange=None,):
+    self.experimentState = experimentState
+    self.timeOfStateChange = timeOfStateChange
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.experimentState = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.timeOfStateChange = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ExperimentStatus')
+    if self.experimentState is not None:
+      oprot.writeFieldBegin('experimentState', TType.I32, 1)
+      oprot.writeI32(self.experimentState)
+      oprot.writeFieldEnd()
+    if self.timeOfStateChange is not None:
+      oprot.writeFieldBegin('timeOfStateChange', TType.I64, 2)
+      oprot.writeI64(self.timeOfStateChange)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.experimentState is None:
+      raise TProtocol.TProtocolException(message='Required field experimentState is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class WorkflowNodeStatus:
+  """
+  Attributes:
+   - workflowNodeState
+   - timeOfStateChange
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'workflowNodeState', None, None, ), # 1
+    (2, TType.I64, 'timeOfStateChange', None, None, ), # 2
+  )
+
+  def __init__(self, workflowNodeState=None, timeOfStateChange=None,):
+    self.workflowNodeState = workflowNodeState
+    self.timeOfStateChange = timeOfStateChange
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.workflowNodeState = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.timeOfStateChange = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('WorkflowNodeStatus')
+    if self.workflowNodeState is not None:
+      oprot.writeFieldBegin('workflowNodeState', TType.I32, 1)
+      oprot.writeI32(self.workflowNodeState)
+      oprot.writeFieldEnd()
+    if self.timeOfStateChange is not None:
+      oprot.writeFieldBegin('timeOfStateChange', TType.I64, 2)
+      oprot.writeI64(self.timeOfStateChange)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.workflowNodeState is None:
+      raise TProtocol.TProtocolException(message='Required field workflowNodeState is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskStatus:
+  """
+  Attributes:
+   - executionState
+   - timeOfStateChange
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'executionState', None, None, ), # 1
+    (2, TType.I64, 'timeOfStateChange', None, None, ), # 2
+  )
+
+  def __init__(self, executionState=None, timeOfStateChange=None,):
+    self.executionState = executionState
+    self.timeOfStateChange = timeOfStateChange
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.executionState = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.timeOfStateChange = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskStatus')
+    if self.executionState is not None:
+      oprot.writeFieldBegin('executionState', TType.I32, 1)
+      oprot.writeI32(self.executionState)
+      oprot.writeFieldEnd()
+    if self.timeOfStateChange is not None:
+      oprot.writeFieldBegin('timeOfStateChange', TType.I64, 2)
+      oprot.writeI64(self.timeOfStateChange)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.executionState is None:
+      raise TProtocol.TProtocolException(message='Required field executionState is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class JobStatus:
+  """
+  Attributes:
+   - jobState
+   - timeOfStateChange
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'jobState', None, None, ), # 1
+    (2, TType.I64, 'timeOfStateChange', None, None, ), # 2
+  )
+
+  def __init__(self, jobState=None, timeOfStateChange=None,):
+    self.jobState = jobState
+    self.timeOfStateChange = timeOfStateChange
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.jobState = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.timeOfStateChange = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('JobStatus')
+    if self.jobState is not None:
+      oprot.writeFieldBegin('jobState', TType.I32, 1)
+      oprot.writeI32(self.jobState)
+      oprot.writeFieldEnd()
+    if self.timeOfStateChange is not None:
+      oprot.writeFieldBegin('timeOfStateChange', TType.I64, 2)
+      oprot.writeI64(self.timeOfStateChange)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobState is None:
+      raise TProtocol.TProtocolException(message='Required field jobState is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TransferStatus:
+  """
+  Attributes:
+   - transferState
+   - timeOfStateChange
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'transferState', None, None, ), # 1
+    (2, TType.I64, 'timeOfStateChange', None, None, ), # 2
+  )
+
+  def __init__(self, transferState=None, timeOfStateChange=None,):
+    self.transferState = transferState
+    self.timeOfStateChange = timeOfStateChange
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.transferState = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.timeOfStateChange = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TransferStatus')
+    if self.transferState is not None:
+      oprot.writeFieldBegin('transferState', TType.I32, 1)
+      oprot.writeI32(self.transferState)
+      oprot.writeFieldEnd()
+    if self.timeOfStateChange is not None:
+      oprot.writeFieldBegin('timeOfStateChange', TType.I64, 2)
+      oprot.writeI64(self.timeOfStateChange)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.transferState is None:
+      raise TProtocol.TProtocolException(message='Required field transferState is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ApplicationStatus:
+  """
+  Attributes:
+   - applicationState
+   - timeOfStateChange
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'applicationState', None, None, ), # 1
+    (2, TType.I64, 'timeOfStateChange', None, None, ), # 2
+  )
+
+  def __init__(self, applicationState=None, timeOfStateChange=None,):
+    self.applicationState = applicationState
+    self.timeOfStateChange = timeOfStateChange
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.applicationState = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.timeOfStateChange = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ApplicationStatus')
+    if self.applicationState is not None:
+      oprot.writeFieldBegin('applicationState', TType.STRING, 1)
+      oprot.writeString(self.applicationState)
+      oprot.writeFieldEnd()
+    if self.timeOfStateChange is not None:
+      oprot.writeFieldBegin('timeOfStateChange', TType.I64, 2)
+      oprot.writeI64(self.timeOfStateChange)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.applicationState is None:
+      raise TProtocol.TProtocolException(message='Required field applicationState is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ComputationalResourceScheduling:
+  """
+  A structure holding the Computational Resource Scheduling.
+
+
+  Attributes:
+   - resourceHostId
+   - totalCPUCount
+   - nodeCount
+   - numberOfThreads
+   - queueName
+   - wallTimeLimit
+   - jobStartTime
+   - totalPhysicalMemory
+   - computationalProjectAccount
+   - chassisName
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'resourceHostId', None, None, ), # 1
+    (2, TType.I32, 'totalCPUCount', None, None, ), # 2
+    (3, TType.I32, 'nodeCount', None, None, ), # 3
+    (4, TType.I32, 'numberOfThreads', None, None, ), # 4
+    (5, TType.STRING, 'queueName', None, None, ), # 5
+    (6, TType.I32, 'wallTimeLimit', None, None, ), # 6
+    (7, TType.I32, 'jobStartTime', None, None, ), # 7
+    (8, TType.I32, 'totalPhysicalMemory', None, None, ), # 8
+    (9, TType.STRING, 'computationalProjectAccount', None, None, ), # 9
+    (10, TType.STRING, 'chassisName', None, None, ), # 10
+  )
+
+  def __init__(self, resourceHostId=None, totalCPUCount=None, nodeCount=None, numberOfThreads=None, queueName=None, wallTimeLimit=None, jobStartTime=None, totalPhysicalMemory=None, computationalProjectAccount=None, chassisName=None,):
+    self.resourceHostId = resourceHostId
+    self.totalCPUCount = totalCPUCount
+    self.nodeCount = nodeCount
+    self.numberOfThreads = numberOfThreads
+    self.queueName = queueName
+    self.wallTimeLimit = wallTimeLimit
+    self.jobStartTime = jobStartTime
+    self.totalPhysicalMemory = totalPhysicalMemory
+    self.computationalProjectAccount = computationalProjectAccount
+    self.chassisName = chassisName
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.resourceHostId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.totalCPUCount = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.nodeCount = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I32:
+          self.numberOfThreads = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRING:
+          self.queueName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.I32:
+          self.wallTimeLimit = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.I32:
+          self.jobStartTime = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.I32:
+          self.totalPhysicalMemory = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.STRING:
+          self.computationalProjectAccount = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.STRING:
+          self.chassisName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ComputationalResourceScheduling')
+    if self.resourceHostId is not None:
+      oprot.writeFieldBegin('resourceHostId', TType.STRING, 1)
+      oprot.writeString(self.resourceHostId)
+      oprot.writeFieldEnd()
+    if self.totalCPUCount is not None:
+      oprot.writeFieldBegin('totalCPUCount', TType.I32, 2)
+      oprot.writeI32(self.totalCPUCount)
+      oprot.writeFieldEnd()
+    if self.nodeCount is not None:
+      oprot.writeFieldBegin('nodeCount', TType.I32, 3)
+      oprot.writeI32(self.nodeCount)
+      oprot.writeFieldEnd()
+    if self.numberOfThreads is not None:
+      oprot.writeFieldBegin('numberOfThreads', TType.I32, 4)
+      oprot.writeI32(self.numberOfThreads)
+      oprot.writeFieldEnd()
+    if self.queueName is not None:
+      oprot.writeFieldBegin('queueName', TType.STRING, 5)
+      oprot.writeString(self.queueName)
+      oprot.writeFieldEnd()
+    if self.wallTimeLimit is not None:
+      oprot.writeFieldBegin('wallTimeLimit', TType.I32, 6)
+      oprot.writeI32(self.wallTimeLimit)
+      oprot.writeFieldEnd()
+    if self.jobStartTime is not None:
+      oprot.writeFieldBegin('jobStartTime', TType.I32, 7)
+      oprot.writeI32(self.jobStartTime)
+      oprot.writeFieldEnd()
+    if self.totalPhysicalMemory is not None:
+      oprot.writeFieldBegin('totalPhysicalMemory', TType.I32, 8)
+      oprot.writeI32(self.totalPhysicalMemory)
+      oprot.writeFieldEnd()
+    if self.computationalProjectAccount is not None:
+      oprot.writeFieldBegin('computationalProjectAccount', TType.STRING, 9)
+      oprot.writeString(self.computationalProjectAccount)
+      oprot.writeFieldEnd()
+    if self.chassisName is not None:
+      oprot.writeFieldBegin('chassisName', TType.STRING, 10)
+      oprot.writeString(self.chassisName)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class AdvancedInputDataHandling:
+  """
+  A structure holding specified input data handling.
+
+
+  Attributes:
+   - stageInputFilesToWorkingDir
+   - parentWorkingDirectory
+   - uniqueWorkingDirectory
+   - cleanUpWorkingDirAfterJob
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.BOOL, 'stageInputFilesToWorkingDir', None, False, ), # 1
+    (2, TType.STRING, 'parentWorkingDirectory', None, None, ), # 2
+    (3, TType.STRING, 'uniqueWorkingDirectory', None, None, ), # 3
+    (4, TType.BOOL, 'cleanUpWorkingDirAfterJob', None, False, ), # 4
+  )
+
+  def __init__(self, stageInputFilesToWorkingDir=thrift_spec[1][4], parentWorkingDirectory=None, uniqueWorkingDirectory=None, cleanUpWorkingDirAfterJob=thrift_spec[4][4],):
+    self.stageInputFilesToWorkingDir = stageInputFilesToWorkingDir
+    self.parentWorkingDirectory = parentWorkingDirectory
+    self.uniqueWorkingDirectory = uniqueWorkingDirectory
+    self.cleanUpWorkingDirAfterJob = cleanUpWorkingDirAfterJob
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.BOOL:
+          self.stageInputFilesToWorkingDir = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.parentWorkingDirectory = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.uniqueWorkingDirectory = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.BOOL:
+          self.cleanUpWorkingDirAfterJob = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AdvancedInputDataHandling')
+    if self.stageInputFilesToWorkingDir is not None:
+      oprot.writeFieldBegin('stageInputFilesToWorkingDir', TType.BOOL, 1)
+      oprot.writeBool(self.stageInputFilesToWorkingDir)
+      oprot.writeFieldEnd()
+    if self.parentWorkingDirectory is not None:
+      oprot.writeFieldBegin('parentWorkingDirectory', TType.STRING, 2)
+      oprot.writeString(self.parentWorkingDirectory)
+      oprot.writeFieldEnd()
+    if self.uniqueWorkingDirectory is not None:
+      oprot.writeFieldBegin('uniqueWorkingDirectory', TType.STRING, 3)
+      oprot.writeString(self.uniqueWorkingDirectory)
+      oprot.writeFieldEnd()
+    if self.cleanUpWorkingDirAfterJob is not None:
+      oprot.writeFieldBegin('cleanUpWorkingDirAfterJob', TType.BOOL, 4)
+      oprot.writeBool(self.cleanUpWorkingDirAfterJob)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class AdvancedOutputDataHandling:
+  """
+  A structure holding specified output data handling.
+
+
+  Attributes:
+   - outputDataDir
+   - dataRegistryURL
+   - persistOutputData
+  """
+
+  thrift_spec = (
+    None, # 0
+    None, # 1
+    (2, TType.STRING, 'outputDataDir', None, None, ), # 2
+    (3, TType.STRING, 'dataRegistryURL', None, None, ), # 3
+    (4, TType.BOOL, 'persistOutputData', None, True, ), # 4
+  )
+
+  def __init__(self, outputDataDir=None, dataRegistryURL=None, persistOutputData=thrift_spec[4][4],):
+    self.outputDataDir = outputDataDir
+    self.dataRegistryURL = dataRegistryURL
+    self.persistOutputData = persistOutputData
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 2:
+        if ftype == TType.STRING:
+          self.outputDataDir = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.dataRegistryURL = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.BOOL:
+          self.persistOutputData = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AdvancedOutputDataHandling')
+    if self.outputDataDir is not None:
+      oprot.writeFieldBegin('outputDataDir', TType.STRING, 2)
+      oprot.writeString(self.outputDataDir)
+      oprot.writeFieldEnd()
+    if self.dataRegistryURL is not None:
+      oprot.writeFieldBegin('dataRegistryURL', TType.STRING, 3)
+      oprot.writeString(self.dataRegistryURL)
+      oprot.writeFieldEnd()
+    if self.persistOutputData is not None:
+      oprot.writeFieldBegin('persistOutputData', TType.BOOL, 4)
+      oprot.writeBool(self.persistOutputData)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class QualityOfServiceParams:
+  """
+  A structure holding Quality of Service Parameters.
+
+
+  Attributes:
+   - startExecutionAt
+   - executeBefore
+   - numberofRetries
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'startExecutionAt', None, None, ), # 1
+    (2, TType.STRING, 'executeBefore', None, None, ), # 2
+    (3, TType.I32, 'numberofRetries', None, None, ), # 3
+  )
+
+  def __init__(self, startExecutionAt=None, executeBefore=None, numberofRetries=None,):
+    self.startExecutionAt = startExecutionAt
+    self.executeBefore = executeBefore
+    self.numberofRetries = numberofRetries
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.startExecutionAt = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.executeBefore = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.numberofRetries = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('QualityOfServiceParams')
+    if self.startExecutionAt is not None:
+      oprot.writeFieldBegin('startExecutionAt', TType.STRING, 1)
+      oprot.writeString(self.startExecutionAt)
+      oprot.writeFieldEnd()
+    if self.executeBefore is not None:
+      oprot.writeFieldBegin('executeBefore', TType.STRING, 2)
+      oprot.writeString(self.executeBefore)
+      oprot.writeFieldEnd()
+    if self.numberofRetries is not None:
+      oprot.writeFieldBegin('numberofRetries', TType.I32, 3)
+      oprot.writeI32(self.numberofRetries)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class UserConfigurationData:
+  """
+  A structure holding the experiment configuration.
+
+
+
+  Attributes:
+   - airavataAutoSchedule
+   - overrideManualScheduledParams
+   - shareExperimentPublicly
+   - computationalResourceScheduling
+   - advanceInputDataHandling
+   - advanceOutputDataHandling
+   - qosParams
+   - throttleResources
+   - userDN
+   - generateCert
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.BOOL, 'airavataAutoSchedule', None, False, ), # 1
+    (2, TType.BOOL, 'overrideManualScheduledParams', None, False, ), # 2
+    (3, TType.BOOL, 'shareExperimentPublicly', None, False, ), # 3
+    (4, TType.STRUCT, 'computationalResourceScheduling', (ComputationalResourceScheduling, ComputationalResourceScheduling.thrift_spec), None, ), # 4
+    (5, TType.STRUCT, 'advanceInputDataHandling', (AdvancedInputDataHandling, AdvancedInputDataHandling.thrift_spec), None, ), # 5
+    (6, TType.STRUCT, 'advanceOutputDataHandling', (AdvancedOutputDataHandling, AdvancedOutputDataHandling.thrift_spec), None, ), # 6
+    (7, TType.STRUCT, 'qosParams', (QualityOfServiceParams, QualityOfServiceParams.thrift_spec), None, ), # 7
+    (8, TType.BOOL, 'throttleResources', None, False, ), # 8
+    (9, TType.STRING, 'userDN', None, None, ), # 9
+    (10, TType.BOOL, 'generateCert', None, False, ), # 10
+  )
+
+  def __init__(self, airavataAutoSchedule=thrift_spec[1][4], overrideManualScheduledParams=thrift_spec[2][4], shareExperimentPublicly=thrift_spec[3][4], computationalResourceScheduling=None, advanceInputDataHandling=None, advanceOutputDataHandling=None, qosParams=None, throttleResources=thrift_spec[8][4], userDN=None, generateCert=thrift_spec[10][4],):
+    self.airavataAutoSchedule = airavataAutoSchedule
+    self.overrideManualScheduledParams = overrideManualScheduledParams
+    self.shareExperimentPublicly = shareExperimentPublicly
+    self.computationalResourceScheduling = computationalResourceScheduling
+    self.advanceInputDataHandling = advanceInputDataHandling
+    self.advanceOutputDataHandling = advanceOutputDataHandling
+    self.qosParams = qosParams
+    self.throttleResources = throttleResources
+    self.userDN = userDN
+    self.generateCert = generateCert
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.BOOL:
+          self.airavataAutoSchedule = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.BOOL:
+          self.overrideManualScheduledParams = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.BOOL:
+          self.shareExperimentPublicly = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRUCT:
+          self.computationalResourceScheduling = ComputationalResourceScheduling()
+          self.computationalResourceScheduling.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRUCT:
+          self.advanceInputDataHandling = AdvancedInputDataHandling()
+          self.advanceInputDataHandling.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.STRUCT:
+          self.advanceOutputDataHandling = AdvancedOutputDataHandling()
+          self.advanceOutputDataHandling.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.STRUCT:
+          self.qosParams = QualityOfServiceParams()
+          self.qosParams.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.BOOL:
+          self.throttleResources = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.STRING:
+          self.userDN = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.BOOL:
+          self.generateCert = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('UserConfigurationData')
+    if self.airavataAutoSchedule is not None:
+      oprot.writeFieldBegin('airavataAutoSchedule', TType.BOOL, 1)
+      oprot.writeBool(self.airavataAutoSchedule)
+      oprot.writeFieldEnd()
+    if self.overrideManualScheduledParams is not None:
+      oprot.writeFieldBegin('overrideManualScheduledParams', TType.BOOL, 2)
+      oprot.writeBool(self.overrideManualScheduledParams)
+      oprot.writeFieldEnd()
+    if self.shareExperimentPublicly is not None:
+      oprot.writeFieldBegin('shareExperimentPublicly', TType.BOOL, 3)
+      oprot.writeBool(self.shareExperimentPublicly)
+      oprot.writeFieldEnd()
+    if self.computationalResourceScheduling is not None:
+      oprot.writeFieldBegin('computationalResourceScheduling', TType.STRUCT, 4)
+      self.computationalResourceScheduling.write(oprot)
+      oprot.writeFieldEnd()
+    if self.advanceInputDataHandling is not None:
+      oprot.writeFieldBegin('advanceInputDataHandling', TType.STRUCT, 5)
+      self.advanceInputDataHandling.write(oprot)
+      oprot.writeFieldEnd()
+    if self.advanceOutputDataHandling is not None:
+      oprot.writeFieldBegin('advanceOutputDataHandling', TType.STRUCT, 6)
+      self.advanceOutputDataHandling.write(oprot)
+      oprot.writeFieldEnd()
+    if self.qosParams is not None:
+      oprot.writeFieldBegin('qosParams', TType.STRUCT, 7)
+      self.qosParams.write(oprot)
+      oprot.writeFieldEnd()
+    if self.throttleResources is not None:
+      oprot.writeFieldBegin('throttleResources', TType.BOOL, 8)
+      oprot.writeBool(self.throttleResources)
+      oprot.writeFieldEnd()
+    if self.userDN is not None:
+      oprot.writeFieldBegin('userDN', TType.STRING, 9)
+      oprot.writeString(self.userDN)
+      oprot.writeFieldEnd()
+    if self.generateCert is not None:
+      oprot.writeFieldBegin('generateCert', TType.BOOL, 10)
+      oprot.writeBool(self.generateCert)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.airavataAutoSchedule is None:
+      raise TProtocol.TProtocolException(message='Required field airavataAutoSchedule is unset!')
+    if self.overrideManualScheduledParams is None:
+      raise TProtocol.TProtocolException(message='Required field overrideManualScheduledParams is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
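A brief, hypothetical sketch of populating UserConfigurationData. The two required booleans already default to False via thrift_spec, so a freshly constructed instance passes validate(); the DN string is invented purely for illustration.

ucd = UserConfigurationData()                    # airavataAutoSchedule and overrideManualScheduledParams default to False
ucd.qosParams = QualityOfServiceParams(numberofRetries=2)
ucd.userDN = '/C=US/O=ExampleOrg/CN=demo-user'   # placeholder distinguished name
ucd.generateCert = True
ucd.validate()                                   # raises TProtocolException only when a required field is None
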
+class ErrorDetails:
+  """
+  Attributes:
+   - errorID
+   - creationTime
+   - actualErrorMessage
+   - userFriendlyMessage
+   - errorCategory
+   - transientOrPersistent
+   - correctiveAction
+   - actionableGroup
+   - rootCauseErrorIdList
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'errorID', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I64, 'creationTime', None, None, ), # 2
+    (3, TType.STRING, 'actualErrorMessage', None, None, ), # 3
+    (4, TType.STRING, 'userFriendlyMessage', None, None, ), # 4
+    (5, TType.I32, 'errorCategory', None, None, ), # 5
+    (6, TType.BOOL, 'transientOrPersistent', None, False, ), # 6
+    (7, TType.I32, 'correctiveAction', None, None, ), # 7
+    (8, TType.I32, 'actionableGroup', None, None, ), # 8
+    (9, TType.LIST, 'rootCauseErrorIdList', (TType.STRING,None), None, ), # 9
+  )
+
+  def __init__(self, errorID=thrift_spec[1][4], creationTime=None, actualErrorMessage=None, userFriendlyMessage=None, errorCategory=None, transientOrPersistent=thrift_spec[6][4], correctiveAction=None, actionableGroup=None, rootCauseErrorIdList=None,):
+    self.errorID = errorID
+    self.creationTime = creationTime
+    self.actualErrorMessage = actualErrorMessage
+    self.userFriendlyMessage = userFriendlyMessage
+    self.errorCategory = errorCategory
+    self.transientOrPersistent = transientOrPersistent
+    self.correctiveAction = correctiveAction
+    self.actionableGroup = actionableGroup
+    self.rootCauseErrorIdList = rootCauseErrorIdList
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.errorID = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.creationTime = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.actualErrorMessage = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.userFriendlyMessage = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I32:
+          self.errorCategory = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.BOOL:
+          self.transientOrPersistent = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.I32:
+          self.correctiveAction = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.I32:
+          self.actionableGroup = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.LIST:
+          self.rootCauseErrorIdList = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = iprot.readString();
+            self.rootCauseErrorIdList.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ErrorDetails')
+    if self.errorID is not None:
+      oprot.writeFieldBegin('errorID', TType.STRING, 1)
+      oprot.writeString(self.errorID)
+      oprot.writeFieldEnd()
+    if self.creationTime is not None:
+      oprot.writeFieldBegin('creationTime', TType.I64, 2)
+      oprot.writeI64(self.creationTime)
+      oprot.writeFieldEnd()
+    if self.actualErrorMessage is not None:
+      oprot.writeFieldBegin('actualErrorMessage', TType.STRING, 3)
+      oprot.writeString(self.actualErrorMessage)
+      oprot.writeFieldEnd()
+    if self.userFriendlyMessage is not None:
+      oprot.writeFieldBegin('userFriendlyMessage', TType.STRING, 4)
+      oprot.writeString(self.userFriendlyMessage)
+      oprot.writeFieldEnd()
+    if self.errorCategory is not None:
+      oprot.writeFieldBegin('errorCategory', TType.I32, 5)
+      oprot.writeI32(self.errorCategory)
+      oprot.writeFieldEnd()
+    if self.transientOrPersistent is not None:
+      oprot.writeFieldBegin('transientOrPersistent', TType.BOOL, 6)
+      oprot.writeBool(self.transientOrPersistent)
+      oprot.writeFieldEnd()
+    if self.correctiveAction is not None:
+      oprot.writeFieldBegin('correctiveAction', TType.I32, 7)
+      oprot.writeI32(self.correctiveAction)
+      oprot.writeFieldEnd()
+    if self.actionableGroup is not None:
+      oprot.writeFieldBegin('actionableGroup', TType.I32, 8)
+      oprot.writeI32(self.actionableGroup)
+      oprot.writeFieldEnd()
+    if self.rootCauseErrorIdList is not None:
+      oprot.writeFieldBegin('rootCauseErrorIdList', TType.LIST, 9)
+      oprot.writeListBegin(TType.STRING, len(self.rootCauseErrorIdList))
+      for iter6 in self.rootCauseErrorIdList:
+        oprot.writeString(iter6)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.errorID is None:
+      raise TProtocol.TProtocolException(message='Required field errorID is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class JobDetails:
+  """
+  Attributes:
+   - jobID
+   - jobDescription
+   - creationTime
+   - jobStatus
+   - applicationStatus
+   - errors
+   - computeResourceConsumed
+   - jobName
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobID', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.STRING, 'jobDescription', None, None, ), # 2
+    (3, TType.I64, 'creationTime', None, None, ), # 3
+    (4, TType.STRUCT, 'jobStatus', (JobStatus, JobStatus.thrift_spec), None, ), # 4
+    (5, TType.STRUCT, 'applicationStatus', (ApplicationStatus, ApplicationStatus.thrift_spec), None, ), # 5
+    (6, TType.LIST, 'errors', (TType.STRUCT,(ErrorDetails, ErrorDetails.thrift_spec)), None, ), # 6
+    (7, TType.STRING, 'computeResourceConsumed', None, None, ), # 7
+    (8, TType.STRING, 'jobName', None, None, ), # 8
+  )
+
+  def __init__(self, jobID=thrift_spec[1][4], jobDescription=None, creationTime=None, jobStatus=None, applicationStatus=None, errors=None, computeResourceConsumed=None, jobName=None,):
+    self.jobID = jobID
+    self.jobDescription = jobDescription
+    self.creationTime = creationTime
+    self.jobStatus = jobStatus
+    self.applicationStatus = applicationStatus
+    self.errors = errors
+    self.computeResourceConsumed = computeResourceConsumed
+    self.jobName = jobName
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobID = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.jobDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I64:
+          self.creationTime = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRUCT:
+          self.jobStatus = JobStatus()
+          self.jobStatus.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRUCT:
+          self.applicationStatus = ApplicationStatus()
+          self.applicationStatus.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.LIST:
+          self.errors = []
+          (_etype10, _size7) = iprot.readListBegin()
+          for _i11 in xrange(_size7):
+            _elem12 = ErrorDetails()
+            _elem12.read(iprot)
+            self.errors.append(_elem12)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.STRING:
+          self.computeResourceConsumed = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.STRING:
+          self.jobName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('JobDetails')
+    if self.jobID is not None:
+      oprot.writeFieldBegin('jobID', TType.STRING, 1)
+      oprot.writeString(self.jobID)
+      oprot.writeFieldEnd()
+    if self.jobDescription is not None:
+      oprot.writeFieldBegin('jobDescription', TType.STRING, 2)
+      oprot.writeString(self.jobDescription)
+      oprot.writeFieldEnd()
+    if self.creationTime is not None:
+      oprot.writeFieldBegin('creationTime', TType.I64, 3)
+      oprot.writeI64(self.creationTime)
+      oprot.writeFieldEnd()
+    if self.jobStatus is not None:
+      oprot.writeFieldBegin('jobStatus', TType.STRUCT, 4)
+      self.jobStatus.write(oprot)
+      oprot.writeFieldEnd()
+    if self.applicationStatus is not None:
+      oprot.writeFieldBegin('applicationStatus', TType.STRUCT, 5)
+      self.applicationStatus.write(oprot)
+      oprot.writeFieldEnd()
+    if self.errors is not None:
+      oprot.writeFieldBegin('errors', TType.LIST, 6)
+      oprot.writeListBegin(TType.STRUCT, len(self.errors))
+      for iter13 in self.errors:
+        iter13.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.computeResourceConsumed is not None:
+      oprot.writeFieldBegin('computeResourceConsumed', TType.STRING, 7)
+      oprot.writeString(self.computeResourceConsumed)
+      oprot.writeFieldEnd()
+    if self.jobName is not None:
+      oprot.writeFieldBegin('jobName', TType.STRING, 8)
+      oprot.writeString(self.jobName)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobID is None:
+      raise TProtocol.TProtocolException(message='Required field jobID is unset!')
+    if self.jobDescription is None:
+      raise TProtocol.TProtocolException(message='Required field jobDescription is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
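Another illustrative fragment, attaching an ErrorDetails record to a JobDetails instance. The message and description strings are placeholders, not values used by Airavata.

err = ErrorDetails(actualErrorMessage='walltime limit exceeded',      # placeholder text
                   userFriendlyMessage='The job ran out of time.',
                   transientOrPersistent=False)

job = JobDetails(jobDescription='sample batch submission',            # placeholder description
                 jobName='echo-test')
job.errors = [err]
job.validate()   # jobID and jobDescription are required; jobID carries the generated default value
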
+class DataTransferDetails:
+  """
+  Attributes:
+   - transferID
+   - creationTime
+   - transferDescription
+   - transferStatus
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'transferID', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I64, 'creationTime', None, None, ), # 2
+    (3, TType.STRING, 'transferDescription', None, None, ), # 3
+    (4, TType.STRUCT, 'transferStatus', (TransferStatus, TransferStatus.thrift_spec), None, ), # 4
+  )
+
+  def __init__(self, transferID=thrift_spec[1][4], creationTime=None, transferDescription=None, transferStatus=None,):
+    self.transferID = transferID
+    self.creationTime = creationTime
+    self.transferDescription = transferDescription
+    self.transferStatus = transferStatus
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.transferID = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.creationTime = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.transferDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRUCT:
+          self.transferStatus = TransferStatus()
+          self.transferStatus.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('DataTransferDetails')
+    if self.transferID is not None:
+      oprot.writeFieldBegin('transferID', TType.STRING, 1)
+      oprot.writeString(self.transferID)
+      oprot.writeFieldEnd()
+    if self.creationTime is not None:
+      oprot.writeFieldBegin('creationTime', TType.I64, 2)
+      oprot.writeI64(self.creationTime)
+      oprot.writeFieldEnd()
+    if self.transferDescription is not None:
+      oprot.writeFieldBegin('transferDescription', TType.STRING, 3)
+      oprot.writeString(self.transferDescription)
+      oprot.writeFieldEnd()
+    if self.transferStatus is not None:
+      oprot.writeFieldBegin('transferStatus', TType.STRUCT, 4)
+      self.transferStatus.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.transferID is None:
+      raise TProtocol.TProtocolException(message='Required field transferID is unset!')
+    if self.transferDescription is None:
+      raise TProtocol.TProtocolException(message='Required field transferDescription is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskDetails:
+  """
+  A structure holding the actual execution context decided based on user-provided configuration data or system-inferred
+    information from scheduling and QoS parameters. One experiment can have multiple tasks. Each task results in
+    data transfers and jobs.
+
+
+  Attributes:
+   - taskID
+   - creationTime
+   - applicationId
+   - applicationVersion
+   - applicationDeploymentId
+   - applicationInputs
+   - applicationOutputs
+   - taskScheduling
+   - advancedInputDataHandling
+   - advancedOutputDataHandling
+   - taskStatus
+   - jobDetailsList
+   - dataTransferDetailsList
+   - errors
+   - enableEmailNotification
+   - emailAddresses
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'taskID', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I64, 'creationTime', None, None, ), # 2
+    (3, TType.STRING, 'applicationId', None, None, ), # 3
+    (4, TType.STRING, 'applicationVersion', None, None, ), # 4
+    (5, TType.STRING, 'applicationDeploymentId', None, None, ), # 5
+    (6, TType.LIST, 'applicationInputs', (TType.STRUCT,(apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType, apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType.thrift_spec)), None, ), # 6
+    (7, TType.LIST, 'applicationOutputs', (TType.STRUCT,(apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType, apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType.thrift_spec)), None, ), # 7
+    (8, TType.STRUCT, 'taskScheduling', (ComputationalResourceScheduling, ComputationalResourceScheduling.thrift_spec), None, ), # 8
+    (9, TType.STRUCT, 'advancedInputDataHandling', (AdvancedInputDataHandling, AdvancedInputDataHandling.thrift_spec), None, ), # 9
+    (10, TType.STRUCT, 'advancedOutputDataHandling', (AdvancedOutputDataHandling, AdvancedOutputDataHandling.thrift_spec), None, ), # 10
+    (11, TType.STRUCT, 'taskStatus', (TaskStatus, TaskStatus.thrift_spec), None, ), # 11
+    (12, TType.LIST, 'jobDetailsList', (TType.STRUCT,(JobDetails, JobDetails.thrift_spec)), None, ), # 12
+    (13, TType.LIST, 'dataTransferDetailsList', (TType.STRUCT,(DataTransferDetails, DataTransferDetails.thrift_spec)), None, ), # 13
+    (14, TType.LIST, 'errors', (TType.STRUCT,(ErrorDetails, ErrorDetails.thrift_spec)), None, ), # 14
+    (15, TType.BOOL, 'enableEmailNotification', None, None, ), # 15
+    (16, TType.LIST, 'emailAddresses', (TType.STRING,None), None, ), # 16
+  )
+
+  def __init__(self, taskID=thrift_spec[1][4], creationTime=None, applicationId=None, applicationVersion=None, applicationDeploymentId=None, applicationInputs=None, applicationOutputs=None, taskScheduling=None, advancedInputDataHandling=None, advancedOutputDataHandling=None, taskStatus=None, jobDetailsList=None, dataTransferDetailsList=None, errors=None, enableEmailNotification=None, emailAddresses=None,):
+    self.taskID = taskID
+    self.creationTime = creationTime
+    self.applicationId = applicationId
+    self.applicationVersion = applicationVersion
+    self.applicationDeploymentId = applicationDeploymentId
+    self.applicationInputs = applicationInputs
+    self.applicationOutputs = applicationOutputs
+    self.taskScheduling = taskScheduling
+    self.advancedInputDataHandling = advancedInputDataHandling
+    self.advancedOutputDataHandling = advancedOutputDataHandling
+    self.taskStatus = taskStatus
+    self.jobDetailsList = jobDetailsList
+    self.dataTransferDetailsList = dataTransferDetailsList
+    self.errors = errors
+    self.enableEmailNotification = enableEmailNotification
+    self.emailAddresses = emailAddresses
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.taskID = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.creationTime = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.applicationId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.applicationVersion = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRING:
+          self.applicationDeploymentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.LIST:
+          self.applicationInputs = []
+          (_etype17, _size14) = iprot.readListBegin()
+          for _i18 in xrange(_size14):
+            _elem19 = apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType()
+            _elem19.read(iprot)
+            self.applicationInputs.append(_elem19)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.LIST:
+          self.applicationOutputs = []
+          (_etype23, _size20) = iprot.readListBegin()
+          for _i24 in xrange(_size20):
+            _elem25 = apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType()
+            _elem25.read(iprot)
+            self.applicationOutputs.append(_elem25)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.STRUCT:
+          self.taskScheduling = ComputationalResourceScheduling()
+          self.taskScheduling.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.STRUCT:
+          self.advancedInputDataHandling = AdvancedInputDataHandling()
+          self.advancedInputDataHandling.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.STRUCT:
+          self.advancedOutputDataHandling = AdvancedOutputDataHandling()
+          self.advancedOutputDataHandling.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 11:
+        if ftype == TType.STRUCT:
+          self.taskStatus = TaskStatus()
+          self.taskStatus.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 12:
+        if ftype == TType.LIST:
+          self.jobDetailsList = []
+          (_etype29, _size26) = iprot.readListBegin()
+          for _i30 in xrange(_size26):
+            _elem31 = JobDetails()
+            _elem31.read(iprot)
+            self.jobDetailsList.append(_elem31)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 13:
+        if ftype == TType.LIST:
+          self.dataTransferDetailsList = []
+          (_etype35, _size32) = iprot.readListBegin()
+          for _i36 in xrange(_size32):
+            _elem37 = DataTransferDetails()
+            _elem37.read(iprot)
+            self.dataTransferDetailsList.append(_elem37)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 14:
+        if ftype == TType.LIST:
+          self.errors = []
+          (_etype41, _size38) = iprot.readListBegin()
+          for _i42 in xrange(_size38):
+            _elem43 = ErrorDetails()
+            _elem43.read(iprot)
+            self.errors.append(_elem43)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 15:
+        if ftype == TType.BOOL:
+          self.enableEmailNotification = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 16:
+        if ftype == TType.LIST:
+          self.emailAddresses = []
+          (_etype47, _size44) = iprot.readListBegin()
+          for _i48 in xrange(_size44):
+            _elem49 = iprot.readString();
+            self.emailAddresses.append(_elem49)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskDetails')
+    if self.taskID is not None:
+      oprot.writeFieldBegin('taskID', TType.STRING, 1)
+      oprot.writeString(self.taskID)
+      oprot.writeFieldEnd()
+    if self.creationTime is not None:
+      oprot.writeFieldBegin('creationTime', TType.I64, 2)
+      oprot.writeI64(self.creationTime)
+      oprot.writeFieldEnd()
+    if self.applicationId is not None:
+      oprot.writeFieldBegin('applicationId', TType.STRING, 3)
+      oprot.writeString(self.applicationId)
+      oprot.writeFieldEnd()
+    if self.applicationVersion is not None:
+      oprot.writeFieldBegin('applicationVersion', TType.STRING, 4)
+      oprot.writeString(self.applicationVersion)
+      oprot.writeFieldEnd()
+    if self.applicationDeploymentId is not None:
+      oprot.writeFieldBegin('applicationDeploymentId', TType.STRING, 5)
+      oprot.writeString(self.applicationDeploymentId)
+      oprot.writeFieldEnd()
+    if self.applicationInputs is not None:
+      oprot.writeFieldBegin('applicationInputs', TType.LIST, 6)
+      oprot.writeListBegin(TType.STRUCT, len(self.applicationInputs))
+      for iter50 in self.applicationInputs:
+        iter50.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.applicationOutputs is not None:
+      oprot.writeFieldBegin('applicationOutputs', TType.LIST, 7)
+      oprot.writeListBegin(TType.STRUCT, len(self.applicationOutputs))
+      for iter51 in self.applicationOutputs:
+        iter51.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.taskScheduling is not None:
+      oprot.writeFieldBegin('taskScheduling', TType.STRUCT, 8)
+      self.taskScheduling.write(oprot)
+      oprot.writeFieldEnd()
+    if self.advancedInputDataHandling is not None:
+      oprot.writeFieldBegin('advancedInputDataHandling', TType.STRUCT, 9)
+      self.advancedInputDataHandling.write(oprot)
+      oprot.writeFieldEnd()
+    if self.advancedOutputDataHandling is not None:
+      oprot.writeFieldBegin('advancedOutputDataHandling', TType.STRUCT, 10)
+      self.advancedOutputDataHandling.write(oprot)
+      oprot.writeFieldEnd()
+    if self.taskStatus is not None:
+      oprot.writeFieldBegin('taskStatus', TType.STRUCT, 11)
+      self.taskStatus.write(oprot)
+      oprot.writeFieldEnd()
+    if self.jobDetailsList is not None:
+      oprot.writeFieldBegin('jobDetailsList', TType.LIST, 12)
+      oprot.writeListBegin(TType.STRUCT, len(self.jobDetailsList))
+      for iter52 in self.jobDetailsList:
+        iter52.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.dataTransferDetailsList is not None:
+      oprot.writeFieldBegin('dataTransferDetailsList', TType.LIST, 13)
+      oprot.writeListBegin(TType.STRUCT, len(self.dataTransferDetailsList))
+      for iter53 in self.dataTransferDetailsList:
+        iter53.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.errors is not None:
+      oprot.writeFieldBegin('errors', TType.LIST, 14)
+      oprot.writeListBegin(TType.STRUCT, len(self.errors))
+      for iter54 in self.errors:
+        iter54.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.enableEmailNotification is not None:
+      oprot.writeFieldBegin('enableEmailNotification', TType.BOOL, 15)
+      oprot.writeBool(self.enableEmailNotification)
+      oprot.writeFieldEnd()
+    if self.emailAddresses is not None:
+      oprot.writeFieldBegin('emailAddresses', TType.LIST, 16)
+      oprot.writeListBegin(TType.STRING, len(self.emailAddresses))
+      for iter55 in self.emailAddresses:
+        oprot.writeString(iter55)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.taskID is None:
+      raise TProtocol.TProtocolException(message='Required field taskID is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
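As the TaskDetails comment above notes, one experiment can hold several tasks, and each task fans out into jobs and data transfers. A small sketch of wiring those child lists together follows; the application id, descriptions, and e-mail address are placeholders.

task = TaskDetails(applicationId='Echo')            # placeholder application interface id
task.jobDetailsList = [JobDetails(jobDescription='echo run')]
task.dataTransferDetailsList = [DataTransferDetails(transferDescription='stage-in of input file')]
task.enableEmailNotification = True
task.emailAddresses = ['user@example.org']          # placeholder address
task.validate()                                     # only taskID is required, and it carries a generated default
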
+class WorkflowNodeDetails:
+  """
+  A structure holding the node data.
+  nodeInstanceId - unique node identifier for each run
+
+  Attributes:
+   - nodeInstanceId
+   - creationTime
+   - nodeName
+   - executionUnit
+   - executionUnitData
+   - nodeInputs
+   - nodeOutputs
+   - workflowNodeStatus
+   - taskDetailsList
+   - errors
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'nodeInstanceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I64, 'creationTime', None, None, ), # 2
+    (3, TType.STRING, 'nodeName', None, "SINGLE_APP_NODE", ), # 3
+    (4, TType.I32, 'executionUnit', None,     1, ), # 4
+    (5, TType.STRING, 'executionUnitData', None, None, ), # 5
+    (6, TType.LIST, 'nodeInputs', (TType.STRUCT,(apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType, apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType.thrift_spec)), None, ), # 6
+    (7, TType.LIST, 'nodeOutputs', (TType.STRUCT,(apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType, apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType.thrift_spec)), None, ), # 7
+    (8, TType.STRUCT, 'workflowNodeStatus', (WorkflowNodeStatus, WorkflowNodeStatus.thrift_spec), None, ), # 8
+    (9, TType.LIST, 'taskDetailsList', (TType.STRUCT,(TaskDetails, TaskDetails.thrift_spec)), None, ), # 9
+    (10, TType.LIST, 'errors', (TType.STRUCT,(ErrorDetails, ErrorDetails.thrift_spec)), None, ), # 10
+  )
+
+  def __init__(self, nodeInstanceId=thrift_spec[1][4], creationTime=None, nodeName=thrift_spec[3][4], executionUnit=thrift_spec[4][4], executionUnitData=None, nodeInputs=None, nodeOutputs=None, workflowNodeStatus=None, taskDetailsList=None, errors=None,):
+    self.nodeInstanceId = nodeInstanceId
+    self.creationTime = creationTime
+    self.nodeName = nodeName
+    self.executionUnit = executionUnit
+    self.executionUnitData = executionUnitData
+    self.nodeInputs = nodeInputs
+    self.nodeOutputs = nodeOutputs
+    self.workflowNodeStatus = workflowNodeStatus
+    self.taskDetailsList = taskDetailsList
+    self.errors = errors
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.nodeInstanceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I64:
+          self.creationTime = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.nodeName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I32:
+          self.executionUnit = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRING:
+          self.executionUnitData = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.LIST:
+          self.nodeInputs = []
+          (_etype59, _size56) = iprot.readListBegin()
+          for _i60 in xrange(_size56):
+            _elem61 = apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType()
+            _elem61.read(iprot)
+            self.nodeInputs.append(_elem61)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.LIST:
+          self.nodeOutputs = []
+          (_etype65, _size62) = iprot.readListBegin()
+          for _i66 in xrange(_size62):
+            _elem67 = apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType()
+            _elem67.read(iprot)
+            self.nodeOutputs.append(_elem67)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.STRUCT:
+          self.workflowNodeStatus = WorkflowNodeStatus()
+          self.workflowNodeStatus.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.LIST:
+          self.taskDetailsList = []
+          (_etype71, _size68) = iprot.readListBegin()
+          for _i72 in xrange(_size68):
+            _elem73 = TaskDetails()
+            _elem73.read(iprot)
+            self.taskDetailsList.append(_elem73)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.LIST:
+          self.errors = []
+          (_etype77, _size74) = iprot.readListBegin()
+          for _i78 in xrange(_size74):
+            _elem79 = ErrorDetails()
+            _elem79.read(iprot)
+            self.errors.append(_elem79)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('WorkflowNodeDetails')
+    if self.nodeInstanceId is not None:
+      oprot.writeFieldBegin('nodeInstanceId', TType.STRING, 1)
+      oprot.writeString(self.nodeInstanceId)
+      oprot.writeFieldEnd()
+    if self.creationTime is not None:
+      oprot.writeFieldBegin('creationTime', TType.I64, 2)
+      oprot.writeI64(self.creationTime)
+      oprot.writeFieldEnd()
+    if self.nodeName is not None:
+      oprot.writeFieldBegin('nodeName', TType.STRING, 3)
+      oprot.writeString(self.nodeName)
+      oprot.writeFieldEnd()
+    if self.executionUnit is not None:
+      oprot.writeFieldBegin('executionUnit', TType.I32, 4)
+      oprot.writeI32(self.executionUnit)
+      oprot.writeFieldEnd()
+    if self.executionUnitData is not None:
+      oprot.writeFieldBegin('executionUnitData', TType.STRING, 5)
+      oprot.writeString(self.executionUnitData)
+      oprot.writeFieldEnd()
+    if self.nodeInputs is not None:
+      oprot.writeFieldBegin('nodeInputs', TType.LIST, 6)
+      oprot.writeListBegin(TType.STRUCT, len(self.nodeInputs))
+      for iter80 in self.nodeInputs:
+        iter80.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.nodeOutputs is not None:
+      oprot.writeFieldBegin('nodeOutputs', TType.LIST, 7)
+      oprot.writeListBegin(TType.STRUCT, len(self.nodeOutputs))
+      for iter81 in self.nodeOutputs:
+        iter81.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.workflowNodeStatus is not None:
+      oprot.writeFieldBegin('workflowNodeStatus', TType.STRUCT, 8)
+      self.workflowNodeStatus.write(oprot)
+      oprot.writeFieldEnd()
+    if self.taskDetailsList is not None:
+      oprot.writeFieldBegin('taskDetailsList', TType.LIST, 9)
+      oprot.writeListBegin(TType.STRUCT, len(self.taskDetailsList))
+      for iter82 in self.taskDetailsList:
+        iter82.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.errors is not None:
+      oprot.writeFieldBegin('errors', TType.LIST, 10)
+      oprot.writeListBegin(TType.STRUCT, len(self.errors))
+      for iter83 in self.errors:
+        iter83.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.nodeInstanceId is None:
+      raise TProtocol.TProtocolException(message='Required field nodeInstanceId is unset!')
+    if self.nodeName is None:
+      raise TProtocol.TProtocolException(message='Required field nodeName is unset!')
+    if self.executionUnit is None:
+      raise TProtocol.TProtocolException(message='Required field executionUnit is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ValidatorResult:
+  """
+  This data structure can be used to store the validation results
+  captured during the validation step. During the launchExperiment
+  operation it can be easily checked to see the errors that occurred
+  during the experiment launch.
+
+
+  Attributes:
+   - result
+   - errorDetails
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.BOOL, 'result', None, None, ), # 1
+    (2, TType.STRING, 'errorDetails', None, None, ), # 2
+  )
+
+  def __init__(self, result=None, errorDetails=None,):
+    self.result = result
+    self.errorDetails = errorDetails
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.BOOL:
+          self.result = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.errorDetails = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ValidatorResult')
+    if self.result is not None:
+      oprot.writeFieldBegin('result', TType.BOOL, 1)
+      oprot.writeBool(self.result)
+      oprot.writeFieldEnd()
+    if self.errorDetails is not None:
+      oprot.writeFieldBegin('errorDetails', TType.STRING, 2)
+      oprot.writeString(self.errorDetails)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.result is None:
+      raise TProtocol.TProtocolException(message='Required field result is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ValidationResults:
+  """
+  Attributes:
+   - validationState
+   - validationResultList
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.BOOL, 'validationState', None, None, ), # 1
+    (2, TType.LIST, 'validationResultList', (TType.STRUCT,(ValidatorResult, ValidatorResult.thrift_spec)), None, ), # 2
+  )
+
+  def __init__(self, validationState=None, validationResultList=None,):
+    self.validationState = validationState
+    self.validationResultList = validationResultList
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.BOOL:
+          self.validationState = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.LIST:
+          self.validationResultList = []
+          (_etype87, _size84) = iprot.readListBegin()
+          for _i88 in xrange(_size84):
+            _elem89 = ValidatorResult()
+            _elem89.read(iprot)
+            self.validationResultList.append(_elem89)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ValidationResults')
+    if self.validationState is not None:
+      oprot.writeFieldBegin('validationState', TType.BOOL, 1)
+      oprot.writeBool(self.validationState)
+      oprot.writeFieldEnd()
+    if self.validationResultList is not None:
+      oprot.writeFieldBegin('validationResultList', TType.LIST, 2)
+      oprot.writeListBegin(TType.STRUCT, len(self.validationResultList))
+      for iter90 in self.validationResultList:
+        iter90.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.validationState is None:
+      raise TProtocol.TProtocolException(message='Required field validationState is unset!')
+    if self.validationResultList is None:
+      raise TProtocol.TProtocolException(message='Required field validationResultList is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
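Per the ValidatorResult comment above, individual validator outcomes can be collected and inspected before launching an experiment. A hypothetical aggregation sketch, with an invented error message:

checks = [ValidatorResult(result=True),
          ValidatorResult(result=False, errorDetails='batch queue name is not set')]   # placeholder message

summary = ValidationResults(validationState=all(c.result for c in checks),
                            validationResultList=checks)
summary.validate()    # both fields are required and both are set here
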
+class Experiment:
+  """
+  A structure holding the experiment metadata and its child models.
+
+  userName:
+    The user name of the targeted gateway end user on whose behalf the experiment is being created.
+      The associated gateway identity can only be inferred from the security handshake, so as to prevent
+      unauthorized Airavata Clients from mimicking an authorized request. If a gateway is not registered with
+      Airavata, an authorization exception is thrown.
+
+  experimentName:
+    The name of the experiment as defined by the user. The name need not be unique as uniqueness is enforced
+       by the generated experiment id.
+
+

<TRUNCATED>

[4/8] airavata git commit: Adding python generated code - AIRAVATA-1642

Posted by sm...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/ttypes.py
new file mode 100644
index 0000000..279231b
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/ttypes.py
@@ -0,0 +1,1234 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.model.workspace.experiment.ttypes
+import apache.airavata.model.appcatalog.appinterface.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+class MessageLevel:
+  INFO = 0
+  DEBUG = 1
+  ERROR = 2
+  ACK = 3
+
+  _VALUES_TO_NAMES = {
+    0: "INFO",
+    1: "DEBUG",
+    2: "ERROR",
+    3: "ACK",
+  }
+
+  _NAMES_TO_VALUES = {
+    "INFO": 0,
+    "DEBUG": 1,
+    "ERROR": 2,
+    "ACK": 3,
+  }
+
+class MessageType:
+  EXPERIMENT = 0
+  TASK = 1
+  WORKFLOWNODE = 2
+  JOB = 3
+  LAUNCHTASK = 4
+  TERMINATETASK = 5
+
+  _VALUES_TO_NAMES = {
+    0: "EXPERIMENT",
+    1: "TASK",
+    2: "WORKFLOWNODE",
+    3: "JOB",
+    4: "LAUNCHTASK",
+    5: "TERMINATETASK",
+  }
+
+  _NAMES_TO_VALUES = {
+    "EXPERIMENT": 0,
+    "TASK": 1,
+    "WORKFLOWNODE": 2,
+    "JOB": 3,
+    "LAUNCHTASK": 4,
+    "TERMINATETASK": 5,
+  }
+
+
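The generated enum classes are plain integer constants with two lookup dictionaries; a quick sketch of translating between names and values:

assert MessageLevel._VALUES_TO_NAMES[MessageLevel.ERROR] == 'ERROR'
assert MessageType._NAMES_TO_VALUES['WORKFLOWNODE'] == MessageType.WORKFLOWNODE
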
+class ExperimentStatusChangeEvent:
+  """
+  Attributes:
+   - state
+   - experimentId
+   - gatewayId
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'state', None, None, ), # 1
+    (2, TType.STRING, 'experimentId', None, None, ), # 2
+    (3, TType.STRING, 'gatewayId', None, None, ), # 3
+  )
+
+  def __init__(self, state=None, experimentId=None, gatewayId=None,):
+    self.state = state
+    self.experimentId = experimentId
+    self.gatewayId = gatewayId
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.state = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.experimentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.gatewayId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ExperimentStatusChangeEvent')
+    if self.state is not None:
+      oprot.writeFieldBegin('state', TType.I32, 1)
+      oprot.writeI32(self.state)
+      oprot.writeFieldEnd()
+    if self.experimentId is not None:
+      oprot.writeFieldBegin('experimentId', TType.STRING, 2)
+      oprot.writeString(self.experimentId)
+      oprot.writeFieldEnd()
+    if self.gatewayId is not None:
+      oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
+      oprot.writeString(self.gatewayId)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.state is None:
+      raise TProtocol.TProtocolException(message='Required field state is unset!')
+    if self.experimentId is None:
+      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
+    if self.gatewayId is None:
+      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class WorkflowIdentifier:
+  """
+  Attributes:
+   - workflowNodeId
+   - experimentId
+   - gatewayId
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'workflowNodeId', None, None, ), # 1
+    (2, TType.STRING, 'experimentId', None, None, ), # 2
+    (3, TType.STRING, 'gatewayId', None, None, ), # 3
+  )
+
+  def __init__(self, workflowNodeId=None, experimentId=None, gatewayId=None,):
+    self.workflowNodeId = workflowNodeId
+    self.experimentId = experimentId
+    self.gatewayId = gatewayId
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.workflowNodeId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.experimentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.gatewayId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('WorkflowIdentifier')
+    if self.workflowNodeId is not None:
+      oprot.writeFieldBegin('workflowNodeId', TType.STRING, 1)
+      oprot.writeString(self.workflowNodeId)
+      oprot.writeFieldEnd()
+    if self.experimentId is not None:
+      oprot.writeFieldBegin('experimentId', TType.STRING, 2)
+      oprot.writeString(self.experimentId)
+      oprot.writeFieldEnd()
+    if self.gatewayId is not None:
+      oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
+      oprot.writeString(self.gatewayId)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.workflowNodeId is None:
+      raise TProtocol.TProtocolException(message='Required field workflowNodeId is unset!')
+    if self.experimentId is None:
+      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
+    if self.gatewayId is None:
+      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class WorkflowNodeStatusChangeEvent:
+  """
+  Attributes:
+   - state
+   - workflowNodeIdentity
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'state', None, None, ), # 1
+    (2, TType.STRUCT, 'workflowNodeIdentity', (WorkflowIdentifier, WorkflowIdentifier.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, state=None, workflowNodeIdentity=None,):
+    self.state = state
+    self.workflowNodeIdentity = workflowNodeIdentity
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.state = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.workflowNodeIdentity = WorkflowIdentifier()
+          self.workflowNodeIdentity.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('WorkflowNodeStatusChangeEvent')
+    if self.state is not None:
+      oprot.writeFieldBegin('state', TType.I32, 1)
+      oprot.writeI32(self.state)
+      oprot.writeFieldEnd()
+    if self.workflowNodeIdentity is not None:
+      oprot.writeFieldBegin('workflowNodeIdentity', TType.STRUCT, 2)
+      self.workflowNodeIdentity.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.state is None:
+      raise TProtocol.TProtocolException(message='Required field state is unset!')
+    if self.workflowNodeIdentity is None:
+      raise TProtocol.TProtocolException(message='Required field workflowNodeIdentity is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskIdentifier:
+  """
+  Attributes:
+   - taskId
+   - workflowNodeId
+   - experimentId
+   - gatewayId
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'taskId', None, None, ), # 1
+    (2, TType.STRING, 'workflowNodeId', None, None, ), # 2
+    (3, TType.STRING, 'experimentId', None, None, ), # 3
+    (4, TType.STRING, 'gatewayId', None, None, ), # 4
+  )
+
+  def __init__(self, taskId=None, workflowNodeId=None, experimentId=None, gatewayId=None,):
+    self.taskId = taskId
+    self.workflowNodeId = workflowNodeId
+    self.experimentId = experimentId
+    self.gatewayId = gatewayId
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.taskId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.workflowNodeId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.experimentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.gatewayId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskIdentifier')
+    if self.taskId is not None:
+      oprot.writeFieldBegin('taskId', TType.STRING, 1)
+      oprot.writeString(self.taskId)
+      oprot.writeFieldEnd()
+    if self.workflowNodeId is not None:
+      oprot.writeFieldBegin('workflowNodeId', TType.STRING, 2)
+      oprot.writeString(self.workflowNodeId)
+      oprot.writeFieldEnd()
+    if self.experimentId is not None:
+      oprot.writeFieldBegin('experimentId', TType.STRING, 3)
+      oprot.writeString(self.experimentId)
+      oprot.writeFieldEnd()
+    if self.gatewayId is not None:
+      oprot.writeFieldBegin('gatewayId', TType.STRING, 4)
+      oprot.writeString(self.gatewayId)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.taskId is None:
+      raise TProtocol.TProtocolException(message='Required field taskId is unset!')
+    if self.workflowNodeId is None:
+      raise TProtocol.TProtocolException(message='Required field workflowNodeId is unset!')
+    if self.experimentId is None:
+      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
+    if self.gatewayId is None:
+      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskStatusChangeEvent:
+  """
+  Attributes:
+   - state
+   - taskIdentity
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'state', None, None, ), # 1
+    (2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, state=None, taskIdentity=None,):
+    self.state = state
+    self.taskIdentity = taskIdentity
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.state = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.taskIdentity = TaskIdentifier()
+          self.taskIdentity.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskStatusChangeEvent')
+    if self.state is not None:
+      oprot.writeFieldBegin('state', TType.I32, 1)
+      oprot.writeI32(self.state)
+      oprot.writeFieldEnd()
+    if self.taskIdentity is not None:
+      oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
+      self.taskIdentity.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.state is None:
+      raise TProtocol.TProtocolException(message='Required field state is unset!')
+    if self.taskIdentity is None:
+      raise TProtocol.TProtocolException(message='Required field taskIdentity is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskStatusChangeRequestEvent:
+  """
+  Attributes:
+   - state
+   - taskIdentity
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'state', None, None, ), # 1
+    (2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, state=None, taskIdentity=None,):
+    self.state = state
+    self.taskIdentity = taskIdentity
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.state = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.taskIdentity = TaskIdentifier()
+          self.taskIdentity.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskStatusChangeRequestEvent')
+    if self.state is not None:
+      oprot.writeFieldBegin('state', TType.I32, 1)
+      oprot.writeI32(self.state)
+      oprot.writeFieldEnd()
+    if self.taskIdentity is not None:
+      oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
+      self.taskIdentity.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.state is None:
+      raise TProtocol.TProtocolException(message='Required field state is unset!')
+    if self.taskIdentity is None:
+      raise TProtocol.TProtocolException(message='Required field taskIdentity is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskOutputChangeEvent:
+  """
+  Attributes:
+   - output
+   - taskIdentity
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.LIST, 'output', (TType.STRUCT,(apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType, apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType.thrift_spec)), None, ), # 1
+    (2, TType.STRUCT, 'taskIdentity', (TaskIdentifier, TaskIdentifier.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, output=None, taskIdentity=None,):
+    self.output = output
+    self.taskIdentity = taskIdentity
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.LIST:
+          self.output = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType()
+            _elem5.read(iprot)
+            self.output.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.taskIdentity = TaskIdentifier()
+          self.taskIdentity.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskOutputChangeEvent')
+    if self.output is not None:
+      oprot.writeFieldBegin('output', TType.LIST, 1)
+      oprot.writeListBegin(TType.STRUCT, len(self.output))
+      for iter6 in self.output:
+        iter6.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.taskIdentity is not None:
+      oprot.writeFieldBegin('taskIdentity', TType.STRUCT, 2)
+      self.taskIdentity.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.output is None:
+      raise TProtocol.TProtocolException(message='Required field output is unset!')
+    if self.taskIdentity is None:
+      raise TProtocol.TProtocolException(message='Required field taskIdentity is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class JobIdentifier:
+  """
+  Attributes:
+   - jobId
+   - taskId
+   - workflowNodeId
+   - experimentId
+   - gatewayId
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobId', None, None, ), # 1
+    (2, TType.STRING, 'taskId', None, None, ), # 2
+    (3, TType.STRING, 'workflowNodeId', None, None, ), # 3
+    (4, TType.STRING, 'experimentId', None, None, ), # 4
+    (5, TType.STRING, 'gatewayId', None, None, ), # 5
+  )
+
+  def __init__(self, jobId=None, taskId=None, workflowNodeId=None, experimentId=None, gatewayId=None,):
+    self.jobId = jobId
+    self.taskId = taskId
+    self.workflowNodeId = workflowNodeId
+    self.experimentId = experimentId
+    self.gatewayId = gatewayId
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.taskId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.workflowNodeId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.experimentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRING:
+          self.gatewayId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('JobIdentifier')
+    if self.jobId is not None:
+      oprot.writeFieldBegin('jobId', TType.STRING, 1)
+      oprot.writeString(self.jobId)
+      oprot.writeFieldEnd()
+    if self.taskId is not None:
+      oprot.writeFieldBegin('taskId', TType.STRING, 2)
+      oprot.writeString(self.taskId)
+      oprot.writeFieldEnd()
+    if self.workflowNodeId is not None:
+      oprot.writeFieldBegin('workflowNodeId', TType.STRING, 3)
+      oprot.writeString(self.workflowNodeId)
+      oprot.writeFieldEnd()
+    if self.experimentId is not None:
+      oprot.writeFieldBegin('experimentId', TType.STRING, 4)
+      oprot.writeString(self.experimentId)
+      oprot.writeFieldEnd()
+    if self.gatewayId is not None:
+      oprot.writeFieldBegin('gatewayId', TType.STRING, 5)
+      oprot.writeString(self.gatewayId)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobId is None:
+      raise TProtocol.TProtocolException(message='Required field jobId is unset!')
+    if self.taskId is None:
+      raise TProtocol.TProtocolException(message='Required field taskId is unset!')
+    if self.workflowNodeId is None:
+      raise TProtocol.TProtocolException(message='Required field workflowNodeId is unset!')
+    if self.experimentId is None:
+      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
+    if self.gatewayId is None:
+      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskSubmitEvent:
+  """
+  Attributes:
+   - experimentId
+   - taskId
+   - gatewayId
+   - tokenId
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'experimentId', None, None, ), # 1
+    (2, TType.STRING, 'taskId', None, None, ), # 2
+    (3, TType.STRING, 'gatewayId', None, None, ), # 3
+    (4, TType.STRING, 'tokenId', None, None, ), # 4
+  )
+
+  def __init__(self, experimentId=None, taskId=None, gatewayId=None, tokenId=None,):
+    self.experimentId = experimentId
+    self.taskId = taskId
+    self.gatewayId = gatewayId
+    self.tokenId = tokenId
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.experimentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.taskId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.gatewayId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.tokenId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskSubmitEvent')
+    if self.experimentId is not None:
+      oprot.writeFieldBegin('experimentId', TType.STRING, 1)
+      oprot.writeString(self.experimentId)
+      oprot.writeFieldEnd()
+    if self.taskId is not None:
+      oprot.writeFieldBegin('taskId', TType.STRING, 2)
+      oprot.writeString(self.taskId)
+      oprot.writeFieldEnd()
+    if self.gatewayId is not None:
+      oprot.writeFieldBegin('gatewayId', TType.STRING, 3)
+      oprot.writeString(self.gatewayId)
+      oprot.writeFieldEnd()
+    if self.tokenId is not None:
+      oprot.writeFieldBegin('tokenId', TType.STRING, 4)
+      oprot.writeString(self.tokenId)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.experimentId is None:
+      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
+    if self.taskId is None:
+      raise TProtocol.TProtocolException(message='Required field taskId is unset!')
+    if self.gatewayId is None:
+      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
+    if self.tokenId is None:
+      raise TProtocol.TProtocolException(message='Required field tokenId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TaskTerminateEvent:
+  """
+  Attributes:
+   - experimentId
+   - taskId
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'experimentId', None, None, ), # 1
+    (2, TType.STRING, 'taskId', None, None, ), # 2
+  )
+
+  def __init__(self, experimentId=None, taskId=None,):
+    self.experimentId = experimentId
+    self.taskId = taskId
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.experimentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.taskId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TaskTerminateEvent')
+    if self.experimentId is not None:
+      oprot.writeFieldBegin('experimentId', TType.STRING, 1)
+      oprot.writeString(self.experimentId)
+      oprot.writeFieldEnd()
+    if self.taskId is not None:
+      oprot.writeFieldBegin('taskId', TType.STRING, 2)
+      oprot.writeString(self.taskId)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.experimentId is None:
+      raise TProtocol.TProtocolException(message='Required field experimentId is unset!')
+    if self.taskId is None:
+      raise TProtocol.TProtocolException(message='Required field taskId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class JobStatusChangeEvent:
+  """
+  Attributes:
+   - state
+   - jobIdentity
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'state', None, None, ), # 1
+    (2, TType.STRUCT, 'jobIdentity', (JobIdentifier, JobIdentifier.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, state=None, jobIdentity=None,):
+    self.state = state
+    self.jobIdentity = jobIdentity
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.state = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.jobIdentity = JobIdentifier()
+          self.jobIdentity.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('JobStatusChangeEvent')
+    if self.state is not None:
+      oprot.writeFieldBegin('state', TType.I32, 1)
+      oprot.writeI32(self.state)
+      oprot.writeFieldEnd()
+    if self.jobIdentity is not None:
+      oprot.writeFieldBegin('jobIdentity', TType.STRUCT, 2)
+      self.jobIdentity.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.state is None:
+      raise TProtocol.TProtocolException(message='Required field state is unset!')
+    if self.jobIdentity is None:
+      raise TProtocol.TProtocolException(message='Required field jobIdentity is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class JobStatusChangeRequestEvent:
+  """
+  Attributes:
+   - state
+   - jobIdentity
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'state', None, None, ), # 1
+    (2, TType.STRUCT, 'jobIdentity', (JobIdentifier, JobIdentifier.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, state=None, jobIdentity=None,):
+    self.state = state
+    self.jobIdentity = jobIdentity
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.state = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.jobIdentity = JobIdentifier()
+          self.jobIdentity.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('JobStatusChangeRequestEvent')
+    if self.state is not None:
+      oprot.writeFieldBegin('state', TType.I32, 1)
+      oprot.writeI32(self.state)
+      oprot.writeFieldEnd()
+    if self.jobIdentity is not None:
+      oprot.writeFieldBegin('jobIdentity', TType.STRUCT, 2)
+      self.jobIdentity.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.state is None:
+      raise TProtocol.TProtocolException(message='Required field state is unset!')
+    if self.jobIdentity is None:
+      raise TProtocol.TProtocolException(message='Required field jobIdentity is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class Message:
+  """
+  Attributes:
+   - event
+   - messageId
+   - messageType
+   - updatedTime
+   - messageLevel
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'event', None, None, ), # 1
+    (2, TType.STRING, 'messageId', None, "DO_NOT_SET_AT_CLIENTS", ), # 2
+    (3, TType.I32, 'messageType', None, None, ), # 3
+    (4, TType.I64, 'updatedTime', None, None, ), # 4
+    (5, TType.I32, 'messageLevel', None, None, ), # 5
+  )
+
+  def __init__(self, event=None, messageId=thrift_spec[2][4], messageType=None, updatedTime=None, messageLevel=None,):
+    self.event = event
+    self.messageId = messageId
+    self.messageType = messageType
+    self.updatedTime = updatedTime
+    self.messageLevel = messageLevel
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.event = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.messageId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.messageType = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I64:
+          self.updatedTime = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I32:
+          self.messageLevel = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('Message')
+    if self.event is not None:
+      oprot.writeFieldBegin('event', TType.STRING, 1)
+      oprot.writeString(self.event)
+      oprot.writeFieldEnd()
+    if self.messageId is not None:
+      oprot.writeFieldBegin('messageId', TType.STRING, 2)
+      oprot.writeString(self.messageId)
+      oprot.writeFieldEnd()
+    if self.messageType is not None:
+      oprot.writeFieldBegin('messageType', TType.I32, 3)
+      oprot.writeI32(self.messageType)
+      oprot.writeFieldEnd()
+    if self.updatedTime is not None:
+      oprot.writeFieldBegin('updatedTime', TType.I64, 4)
+      oprot.writeI64(self.updatedTime)
+      oprot.writeFieldEnd()
+    if self.messageLevel is not None:
+      oprot.writeFieldBegin('messageLevel', TType.I32, 5)
+      oprot.writeI32(self.messageLevel)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.event is None:
+      raise TProtocol.TProtocolException(message='Required field event is unset!')
+    if self.messageId is None:
+      raise TProtocol.TProtocolException(message='Required field messageId is unset!')
+    if self.messageType is None:
+      raise TProtocol.TProtocolException(message='Required field messageType is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
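As a quick orientation to the generated event structs above, the following sketch (illustrative only, not part of the diff) round-trips a TaskStatusChangeEvent through the plain Thrift binary protocol; the integer passed for state is a stand-in for a value from the generated state enums.

    # Illustrative only: serialize and re-read one of the generated event structs.
    # The state value 2 is a placeholder; real values come from the state enums
    # generated in the experiment model modules.
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    from apache.airavata.model.messaging.event.ttypes import (
        TaskIdentifier, TaskStatusChangeEvent)

    identity = TaskIdentifier(taskId='task-1', workflowNodeId='node-1',
                              experimentId='exp-1', gatewayId='gw-1')
    event = TaskStatusChangeEvent(state=2, taskIdentity=identity)
    event.validate()

    # Write into an in-memory transport, then decode into a fresh struct.
    buf = TTransport.TMemoryBuffer()
    event.write(TBinaryProtocol.TBinaryProtocol(buf))
    decoded = TaskStatusChangeEvent()
    decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
    assert decoded == event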

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/ttypes.py
new file mode 100644
index 0000000..5356eb5
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/ttypes.py
@@ -0,0 +1,22 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.model.workspace.ttypes
+import apache.airavata.api.error.ttypes
+import apache.airavata.model.messaging.event.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/constants.py
new file mode 100644
index 0000000..249ecf4
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/constants.py
@@ -0,0 +1,12 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+
+DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/ttypes.py
new file mode 100644
index 0000000..44552c2
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workflow/ttypes.py
@@ -0,0 +1,162 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.model.appcatalog.appinterface.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+
+class Workflow:
+  """
+  Attributes:
+   - templateId
+   - name
+   - graph
+   - image
+   - workflowInputs
+   - workflowOutputs
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'templateId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.STRING, 'name', None, None, ), # 2
+    (3, TType.STRING, 'graph', None, None, ), # 3
+    (4, TType.STRING, 'image', None, None, ), # 4
+    (5, TType.LIST, 'workflowInputs', (TType.STRUCT,(apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType, apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType.thrift_spec)), None, ), # 5
+    (6, TType.LIST, 'workflowOutputs', (TType.STRUCT,(apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType, apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType.thrift_spec)), None, ), # 6
+  )
+
+  def __init__(self, templateId=thrift_spec[1][4], name=None, graph=None, image=None, workflowInputs=None, workflowOutputs=None,):
+    self.templateId = templateId
+    self.name = name
+    self.graph = graph
+    self.image = image
+    self.workflowInputs = workflowInputs
+    self.workflowOutputs = workflowOutputs
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.templateId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.name = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.graph = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.image = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.LIST:
+          self.workflowInputs = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = apache.airavata.model.appcatalog.appinterface.ttypes.InputDataObjectType()
+            _elem5.read(iprot)
+            self.workflowInputs.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.LIST:
+          self.workflowOutputs = []
+          (_etype9, _size6) = iprot.readListBegin()
+          for _i10 in xrange(_size6):
+            _elem11 = apache.airavata.model.appcatalog.appinterface.ttypes.OutputDataObjectType()
+            _elem11.read(iprot)
+            self.workflowOutputs.append(_elem11)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('Workflow')
+    if self.templateId is not None:
+      oprot.writeFieldBegin('templateId', TType.STRING, 1)
+      oprot.writeString(self.templateId)
+      oprot.writeFieldEnd()
+    if self.name is not None:
+      oprot.writeFieldBegin('name', TType.STRING, 2)
+      oprot.writeString(self.name)
+      oprot.writeFieldEnd()
+    if self.graph is not None:
+      oprot.writeFieldBegin('graph', TType.STRING, 3)
+      oprot.writeString(self.graph)
+      oprot.writeFieldEnd()
+    if self.image is not None:
+      oprot.writeFieldBegin('image', TType.STRING, 4)
+      oprot.writeString(self.image)
+      oprot.writeFieldEnd()
+    if self.workflowInputs is not None:
+      oprot.writeFieldBegin('workflowInputs', TType.LIST, 5)
+      oprot.writeListBegin(TType.STRUCT, len(self.workflowInputs))
+      for iter12 in self.workflowInputs:
+        iter12.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.workflowOutputs is not None:
+      oprot.writeFieldBegin('workflowOutputs', TType.LIST, 6)
+      oprot.writeListBegin(TType.STRUCT, len(self.workflowOutputs))
+      for iter13 in self.workflowOutputs:
+        iter13.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.templateId is None:
+      raise TProtocol.TProtocolException(message='Required field templateId is unset!')
+    if self.name is None:
+      raise TProtocol.TProtocolException(message='Required field name is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
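Worth noting from the struct above: templateId picks up its default straight from thrift_spec, i.e. the DO_NOT_SET_AT_CLIENTS sentinel defined in workflow/constants.py, and validate() only insists on templateId and name. A small hedged sketch (the graph payload is a placeholder):

    # Illustrative only: a client that never sets templateId sends the sentinel.
    from apache.airavata.model.workflow.ttypes import Workflow
    from apache.airavata.model.workflow.constants import DEFAULT_ID

    wf = Workflow(name='echo-workflow', graph='{"nodes": []}')  # placeholder graph
    assert wf.templateId == DEFAULT_ID   # "DO_NOT_SET_AT_CLIENTS"
    wf.validate()                        # passes: templateId and name are both set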

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/constants.py
new file mode 100644
index 0000000..35216c6
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/constants.py
@@ -0,0 +1,11 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/constants.py
new file mode 100644
index 0000000..e48cd00
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/experiment/constants.py
@@ -0,0 +1,14 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+
+DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
+DEFAULT_PROJECT_NAME = "DEFAULT"
+SINGLE_APP_NODE_NAME = "SINGLE_APP_NODE"


[6/8] airavata git commit: Adding python generated code - AIRAVATA-1642

Posted by sm...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/__init__.py
new file mode 100644
index 0000000..e85fb34
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants', 'Airavata']
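The package __init__ above exports the generated Airavata service module alongside ttypes and constants. A hedged sketch of the usual Thrift client wiring for these stubs follows; the Airavata.Client name follows the standard Thrift service-generation convention, the host and port are assumptions about the deployment, and no API call is issued:

    # Illustrative only: standard Thrift transport/protocol setup for the stubs.
    # Host and port are assumptions about the target deployment.
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from apache.airavata.api import Airavata

    transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 8930))
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Airavata.Client(protocol)
    transport.open()
    # ... issue Airavata API calls against `client` here ...
    transport.close()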

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/constants.py
new file mode 100644
index 0000000..05093fe
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/constants.py
@@ -0,0 +1,12 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+
+AIRAVATA_API_VERSION = "0.15.0"
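Since the constants module pins the version the stubs were generated against, a client can fail fast on a mismatch; a minimal sketch, assuming the target deployment is also expected to speak 0.15.0:

    # Illustrative only: guard against stubs that were generated for a
    # different API version than the one this client expects.
    from apache.airavata.api.constants import AIRAVATA_API_VERSION

    EXPECTED_VERSION = "0.15.0"  # assumption about the target deployment
    if AIRAVATA_API_VERSION != EXPECTED_VERSION:
        raise RuntimeError("stubs generated for %s, expected %s"
                           % (AIRAVATA_API_VERSION, EXPECTED_VERSION))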

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/constants.py
new file mode 100644
index 0000000..35216c6
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/constants.py
@@ -0,0 +1,11 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/ttypes.py
new file mode 100644
index 0000000..24999f4
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/error/ttypes.py
@@ -0,0 +1,881 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.model.workspace.experiment.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+class AiravataErrorType:
+  """
+  A list of Airavata API Error Message Types
+
+   UNKNOWN: No information available about the error
+    
+   PERMISSION_DENIED: Not permitted to perform action
+
+   INTERNAL_ERROR: Unexpected problem with the service
+
+   AUTHENTICATION_FAILURE: The client failed to authenticate.
+
+   INVALID_AUTHORIZATION: Security Token and/or Username and/or password is incorrect
+    
+   AUTHORIZATION_EXPIRED: Authentication token expired
+   
+   UNKNOWN_GATEWAY_ID: The gateway is not registered with Airavata.
+
+   UNSUPPORTED_OPERATION: Operation denied because it is currently unsupported.
+  """
+  UNKNOWN = 0
+  PERMISSION_DENIED = 1
+  INTERNAL_ERROR = 2
+  AUTHENTICATION_FAILURE = 3
+  INVALID_AUTHORIZATION = 4
+  AUTHORIZATION_EXPIRED = 5
+  UNKNOWN_GATEWAY_ID = 6
+  UNSUPPORTED_OPERATION = 7
+
+  _VALUES_TO_NAMES = {
+    0: "UNKNOWN",
+    1: "PERMISSION_DENIED",
+    2: "INTERNAL_ERROR",
+    3: "AUTHENTICATION_FAILURE",
+    4: "INVALID_AUTHORIZATION",
+    5: "AUTHORIZATION_EXPIRED",
+    6: "UNKNOWN_GATEWAY_ID",
+    7: "UNSUPPORTED_OPERATION",
+  }
+
+  _NAMES_TO_VALUES = {
+    "UNKNOWN": 0,
+    "PERMISSION_DENIED": 1,
+    "INTERNAL_ERROR": 2,
+    "AUTHENTICATION_FAILURE": 3,
+    "INVALID_AUTHORIZATION": 4,
+    "AUTHORIZATION_EXPIRED": 5,
+    "UNKNOWN_GATEWAY_ID": 6,
+    "UNSUPPORTED_OPERATION": 7,
+  }
+
+
+class ExperimentNotFoundException(TException):
+  """
+  This exception is thrown when a client asks to perform an operation on an experiment that does not exist.
+
+  identifier:  A description of the experiment that was not found on the server.
+
+  key:  The value passed from the client in the identifier, which was not found.
+
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.message = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ExperimentNotFoundException')
+    if self.message is not None:
+      oprot.writeFieldBegin('message', TType.STRING, 1)
+      oprot.writeString(self.message)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.message is None:
+      raise TProtocol.TProtocolException(message='Required field message is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ProjectNotFoundException(TException):
+  """
+  1:  optional  string identifier,
+  2:  optional  string key
+
+
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.message = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ProjectNotFoundException')
+    if self.message is not None:
+      oprot.writeFieldBegin('message', TType.STRING, 1)
+      oprot.writeString(self.message)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.message is None:
+      raise TProtocol.TProtocolException(message='Required field message is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class InvalidRequestException(TException):
+  """
+  This exception is thrown for invalid requests that occur for any reason, such as a required input parameter being
+   missing or a parameter being malformed.
+
+   message: contains the associated error message.
+
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.message = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('InvalidRequestException')
+    if self.message is not None:
+      oprot.writeFieldBegin('message', TType.STRING, 1)
+      oprot.writeString(self.message)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.message is None:
+      raise TProtocol.TProtocolException(message='Required field message is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class TimedOutException(TException):
+  """
+  This exception is thrown when the RPC timeout is exceeded.
+  """
+
+  thrift_spec = (
+  )
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('TimedOutException')
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class AuthenticationException(TException):
+  """
+  This exception is thrown for invalid authentication requests.
+
+   message: contains the cause of the authentication failure.
+
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.message = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AuthenticationException')
+    if self.message is not None:
+      oprot.writeFieldBegin('message', TType.STRING, 1)
+      oprot.writeString(self.message)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.message is None:
+      raise TProtocol.TProtocolException(message='Required field message is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class AuthorizationException(TException):
+  """
+  This exception is thrown for invalid authorization requests, such as when a user does not have access to an application or resource.
+
+   message: contains the authorization failure message
+
+  Attributes:
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'message', None, None, ), # 1
+  )
+
+  def __init__(self, message=None,):
+    self.message = message
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.message = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AuthorizationException')
+    if self.message is not None:
+      oprot.writeFieldBegin('message', TType.STRING, 1)
+      oprot.writeString(self.message)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.message is None:
+      raise TProtocol.TProtocolException(message='Required field message is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class AiravataClientException(TException):
+  """
+  This exception is thrown by Airavata Services when a call fails as a result of
+  a problem that a client may be able to resolve.  For example, if the user
+  attempts to execute an application on a resource the gateway does not have access to.
+
+  This exception would not be used for internal system errors that do not
+  reflect user actions, but rather reflect a problem within the service that
+  the client cannot resolve.
+
+  airavataErrorType:  The message type indicating the error that occurred.
+    must be one of the values of AiravataErrorType.
+
+  parameter:  If the error applied to a particular input parameter, this will
+    indicate which parameter.
+
+  Attributes:
+   - airavataErrorType
+   - parameter
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'airavataErrorType', None, None, ), # 1
+    (2, TType.STRING, 'parameter', None, None, ), # 2
+  )
+
+  def __init__(self, airavataErrorType=None, parameter=None,):
+    self.airavataErrorType = airavataErrorType
+    self.parameter = parameter
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.airavataErrorType = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.parameter = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AiravataClientException')
+    if self.airavataErrorType is not None:
+      oprot.writeFieldBegin('airavataErrorType', TType.I32, 1)
+      oprot.writeI32(self.airavataErrorType)
+      oprot.writeFieldEnd()
+    if self.parameter is not None:
+      oprot.writeFieldBegin('parameter', TType.STRING, 2)
+      oprot.writeString(self.parameter)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.airavataErrorType is None:
+      raise TProtocol.TProtocolException(message='Required field airavataErrorType is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ValidatorResult:
+  """
+  Attributes:
+   - result
+   - errorDetails
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.BOOL, 'result', None, None, ), # 1
+    (2, TType.STRING, 'errorDetails', None, None, ), # 2
+  )
+
+  def __init__(self, result=None, errorDetails=None,):
+    self.result = result
+    self.errorDetails = errorDetails
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.BOOL:
+          self.result = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.errorDetails = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ValidatorResult')
+    if self.result is not None:
+      oprot.writeFieldBegin('result', TType.BOOL, 1)
+      oprot.writeBool(self.result)
+      oprot.writeFieldEnd()
+    if self.errorDetails is not None:
+      oprot.writeFieldBegin('errorDetails', TType.STRING, 2)
+      oprot.writeString(self.errorDetails)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.result is None:
+      raise TProtocol.TProtocolException(message='Required field result is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ValidationResults:
+  """
+  Attributes:
+   - validationState
+   - validationResultList
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.BOOL, 'validationState', None, None, ), # 1
+    (2, TType.LIST, 'validationResultList', (TType.STRUCT,(ValidatorResult, ValidatorResult.thrift_spec)), None, ), # 2
+  )
+
+  def __init__(self, validationState=None, validationResultList=None,):
+    self.validationState = validationState
+    self.validationResultList = validationResultList
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.BOOL:
+          self.validationState = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.LIST:
+          self.validationResultList = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = ValidatorResult()
+            _elem5.read(iprot)
+            self.validationResultList.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ValidationResults')
+    if self.validationState is not None:
+      oprot.writeFieldBegin('validationState', TType.BOOL, 1)
+      oprot.writeBool(self.validationState)
+      oprot.writeFieldEnd()
+    if self.validationResultList is not None:
+      oprot.writeFieldBegin('validationResultList', TType.LIST, 2)
+      oprot.writeListBegin(TType.STRUCT, len(self.validationResultList))
+      for iter6 in self.validationResultList:
+        iter6.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.validationState is None:
+      raise TProtocol.TProtocolException(message='Required field validationState is unset!')
+    if self.validationResultList is None:
+      raise TProtocol.TProtocolException(message='Required field validationResultList is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class LaunchValidationException(TException):
+  """
+  Attributes:
+   - validationResult
+   - errorMessage
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRUCT, 'validationResult', (ValidationResults, ValidationResults.thrift_spec), None, ), # 1
+    (2, TType.STRING, 'errorMessage', None, None, ), # 2
+  )
+
+  def __init__(self, validationResult=None, errorMessage=None,):
+    self.validationResult = validationResult
+    self.errorMessage = errorMessage
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRUCT:
+          self.validationResult = ValidationResults()
+          self.validationResult.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.errorMessage = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('LaunchValidationException')
+    if self.validationResult is not None:
+      oprot.writeFieldBegin('validationResult', TType.STRUCT, 1)
+      self.validationResult.write(oprot)
+      oprot.writeFieldEnd()
+    if self.errorMessage is not None:
+      oprot.writeFieldBegin('errorMessage', TType.STRING, 2)
+      oprot.writeString(self.errorMessage)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.validationResult is None:
+      raise TProtocol.TProtocolException(message='Required field validationResult is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class AiravataSystemException(TException):
+  """
+  This exception is thrown by Airavata Services when a call fails as a result of
+  a problem in the service that could not be changed through client's action.
+
+  airavataErrorType:  The message type indicating the error that occurred.
+    must be one of the values of AiravataErrorType.
+
+  message:  This may contain additional information about the error
+
+
+  Attributes:
+   - airavataErrorType
+   - message
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.I32, 'airavataErrorType', None, None, ), # 1
+    (2, TType.STRING, 'message', None, None, ), # 2
+  )
+
+  def __init__(self, airavataErrorType=None, message=None,):
+    self.airavataErrorType = airavataErrorType
+    self.message = message
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.I32:
+          self.airavataErrorType = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.message = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('AiravataSystemException')
+    if self.airavataErrorType is not None:
+      oprot.writeFieldBegin('airavataErrorType', TType.I32, 1)
+      oprot.writeI32(self.airavataErrorType)
+      oprot.writeFieldEnd()
+    if self.message is not None:
+      oprot.writeFieldBegin('message', TType.STRING, 2)
+      oprot.writeString(self.message)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.airavataErrorType is None:
+      raise TProtocol.TProtocolException(message='Required field airavataErrorType is unset!')
+    return
+
+
+  def __str__(self):
+    return repr(self)
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
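
Editor's note: a minimal sketch of how a gateway client might consume the generated error types above,
illustrative only and not part of this commit. The import path follows the package layout added in this
patch; the client object and its getExperiment call are assumed placeholders for an Airavata API client.

    # Hypothetical client-side error handling against the generated stubs.
    # Only the exception classes and AiravataErrorType come from this patch;
    # 'client' and 'getExperiment' are assumed placeholders.
    from apache.airavata.api.error.ttypes import (
        AiravataErrorType, AiravataClientException, ExperimentNotFoundException)

    def fetch_experiment(client, token, experiment_id):
        try:
            return client.getExperiment(token, experiment_id)
        except ExperimentNotFoundException as e:
            print("Experiment not found: %s" % e.message)
        except AiravataClientException as e:
            # Translate the numeric error code back to its symbolic name.
            name = AiravataErrorType._VALUES_TO_NAMES.get(e.airavataErrorType, "UNKNOWN")
            print("Client error %s on parameter %r" % (name, e.parameter))
        return None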

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/ttypes.py
new file mode 100644
index 0000000..09140d0
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/ttypes.py
@@ -0,0 +1,28 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.api.error.ttypes
+import apache.airavata.model.ttypes
+import apache.airavata.model.workspace.experiment.ttypes
+import apache.airavata.model.workspace.ttypes
+import apache.airavata.model.appcatalog.computeresource.ttypes
+import apache.airavata.model.appcatalog.appdeployment.ttypes
+import apache.airavata.model.appcatalog.appinterface.ttypes
+import apache.airavata.model.appcatalog.gatewayprofile.ttypes
+import apache.airavata.model.workflow.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/constants.py
new file mode 100644
index 0000000..249ecf4
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/constants.py
@@ -0,0 +1,12 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+
+DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/ttypes.py
new file mode 100644
index 0000000..7383b6d
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appdeployment/ttypes.py
@@ -0,0 +1,530 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+class ApplicationParallelismType:
+  """
+  Enumeration of application parallelism supported by Airavata
+
+  SERIAL:
+   Single processor applications without any parallelization.
+
+  MPI:
+   Message Passing Interface.
+
+  OPENMP:
+   Shared Memory Implementation.
+
+  OPENMP_MPI:
+   Hybrid Applications.
+
+  """
+  SERIAL = 0
+  MPI = 1
+  OPENMP = 2
+  OPENMP_MPI = 3
+
+  _VALUES_TO_NAMES = {
+    0: "SERIAL",
+    1: "MPI",
+    2: "OPENMP",
+    3: "OPENMP_MPI",
+  }
+
+  _NAMES_TO_VALUES = {
+    "SERIAL": 0,
+    "MPI": 1,
+    "OPENMP": 2,
+    "OPENMP_MPI": 3,
+  }
+
+
+class SetEnvPaths:
+  """
+  Key-value pairs used to set environment variables
+
+  name:
+    Name of the environment variable such as PATH, LD_LIBRARY_PATH, NETCDF_HOME.
+
+  value:
+    Value of the environment variable to set
+
+  Attributes:
+   - name
+   - value
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'name', None, None, ), # 1
+    (2, TType.STRING, 'value', None, None, ), # 2
+  )
+
+  def __init__(self, name=None, value=None,):
+    self.name = name
+    self.value = value
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.name = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.value = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('SetEnvPaths')
+    if self.name is not None:
+      oprot.writeFieldBegin('name', TType.STRING, 1)
+      oprot.writeString(self.name)
+      oprot.writeFieldEnd()
+    if self.value is not None:
+      oprot.writeFieldBegin('value', TType.STRING, 2)
+      oprot.writeString(self.value)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.name is None:
+      raise TProtocol.TProtocolException(message='Required field name is unset!')
+    if self.value is None:
+      raise TProtocol.TProtocolException(message='Required field value is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ApplicationModule:
+  """
+  Application Module Information. A module has to be registered before registering a deployment.
+
+  appModuleId: Airavata Internal Unique Job ID. This is set by the registry.
+
+  appModuleName:
+    Name of the application module.
+
+  appModuleVersion:
+    Version of the application.
+
+  appModuleDescription:
+     Description of the Module
+
+
+  Attributes:
+   - appModuleId
+   - appModuleName
+   - appModuleVersion
+   - appModuleDescription
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'appModuleId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.STRING, 'appModuleName', None, None, ), # 2
+    (3, TType.STRING, 'appModuleVersion', None, None, ), # 3
+    (4, TType.STRING, 'appModuleDescription', None, None, ), # 4
+  )
+
+  def __init__(self, appModuleId=thrift_spec[1][4], appModuleName=None, appModuleVersion=None, appModuleDescription=None,):
+    self.appModuleId = appModuleId
+    self.appModuleName = appModuleName
+    self.appModuleVersion = appModuleVersion
+    self.appModuleDescription = appModuleDescription
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.appModuleId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.appModuleName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.appModuleVersion = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.appModuleDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ApplicationModule')
+    if self.appModuleId is not None:
+      oprot.writeFieldBegin('appModuleId', TType.STRING, 1)
+      oprot.writeString(self.appModuleId)
+      oprot.writeFieldEnd()
+    if self.appModuleName is not None:
+      oprot.writeFieldBegin('appModuleName', TType.STRING, 2)
+      oprot.writeString(self.appModuleName)
+      oprot.writeFieldEnd()
+    if self.appModuleVersion is not None:
+      oprot.writeFieldBegin('appModuleVersion', TType.STRING, 3)
+      oprot.writeString(self.appModuleVersion)
+      oprot.writeFieldEnd()
+    if self.appModuleDescription is not None:
+      oprot.writeFieldBegin('appModuleDescription', TType.STRING, 4)
+      oprot.writeString(self.appModuleDescription)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.appModuleId is None:
+      raise TProtocol.TProtocolException(message='Required field appModuleId is unset!')
+    if self.appModuleName is None:
+      raise TProtocol.TProtocolException(message='Required field appModuleName is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ApplicationDeploymentDescription:
+  """
+  Application Deployment Description
+
+  appDeploymentId: Airavata Internal Unique Job ID. This is set by the registry.
+
+  appModuleName:
+    Application Module Name. This has to precisely describe the binary.
+
+  computeHostId:
+    This ID maps the application deployment to a particular resource previously described within Airavata.
+    Example: Stampede is first registered and then referred to when registering WRF.
+
+  moduleLoadCmd:
+   Command string to load modules. This will be placed in the job submission
+   Ex: module load amber
+
+  libPrependPaths:
+   prepend to a path variable the value
+
+  libAppendPaths:
+   append to a path variable the value
+
+  setEnvironment:
+   assigns to the environment variable "NAME" the value
+
+
+  Attributes:
+   - appDeploymentId
+   - appModuleId
+   - computeHostId
+   - executablePath
+   - parallelism
+   - appDeploymentDescription
+   - moduleLoadCmds
+   - libPrependPaths
+   - libAppendPaths
+   - setEnvironment
+   - preJobCommands
+   - postJobCommands
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'appDeploymentId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.STRING, 'appModuleId', None, None, ), # 2
+    (3, TType.STRING, 'computeHostId', None, None, ), # 3
+    (4, TType.STRING, 'executablePath', None, None, ), # 4
+    (5, TType.I32, 'parallelism', None,     0, ), # 5
+    (6, TType.STRING, 'appDeploymentDescription', None, None, ), # 6
+    (7, TType.LIST, 'moduleLoadCmds', (TType.STRING,None), None, ), # 7
+    (8, TType.LIST, 'libPrependPaths', (TType.STRUCT,(SetEnvPaths, SetEnvPaths.thrift_spec)), None, ), # 8
+    (9, TType.LIST, 'libAppendPaths', (TType.STRUCT,(SetEnvPaths, SetEnvPaths.thrift_spec)), None, ), # 9
+    (10, TType.LIST, 'setEnvironment', (TType.STRUCT,(SetEnvPaths, SetEnvPaths.thrift_spec)), None, ), # 10
+    (11, TType.LIST, 'preJobCommands', (TType.STRING,None), None, ), # 11
+    (12, TType.LIST, 'postJobCommands', (TType.STRING,None), None, ), # 12
+  )
+
+  def __init__(self, appDeploymentId=thrift_spec[1][4], appModuleId=None, computeHostId=None, executablePath=None, parallelism=thrift_spec[5][4], appDeploymentDescription=None, moduleLoadCmds=None, libPrependPaths=None, libAppendPaths=None, setEnvironment=None, preJobCommands=None, postJobCommands=None,):
+    self.appDeploymentId = appDeploymentId
+    self.appModuleId = appModuleId
+    self.computeHostId = computeHostId
+    self.executablePath = executablePath
+    self.parallelism = parallelism
+    self.appDeploymentDescription = appDeploymentDescription
+    self.moduleLoadCmds = moduleLoadCmds
+    self.libPrependPaths = libPrependPaths
+    self.libAppendPaths = libAppendPaths
+    self.setEnvironment = setEnvironment
+    self.preJobCommands = preJobCommands
+    self.postJobCommands = postJobCommands
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.appDeploymentId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.appModuleId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.computeHostId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.executablePath = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I32:
+          self.parallelism = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.STRING:
+          self.appDeploymentDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.LIST:
+          self.moduleLoadCmds = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = iprot.readString();
+            self.moduleLoadCmds.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.LIST:
+          self.libPrependPaths = []
+          (_etype9, _size6) = iprot.readListBegin()
+          for _i10 in xrange(_size6):
+            _elem11 = SetEnvPaths()
+            _elem11.read(iprot)
+            self.libPrependPaths.append(_elem11)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.LIST:
+          self.libAppendPaths = []
+          (_etype15, _size12) = iprot.readListBegin()
+          for _i16 in xrange(_size12):
+            _elem17 = SetEnvPaths()
+            _elem17.read(iprot)
+            self.libAppendPaths.append(_elem17)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.LIST:
+          self.setEnvironment = []
+          (_etype21, _size18) = iprot.readListBegin()
+          for _i22 in xrange(_size18):
+            _elem23 = SetEnvPaths()
+            _elem23.read(iprot)
+            self.setEnvironment.append(_elem23)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 11:
+        if ftype == TType.LIST:
+          self.preJobCommands = []
+          (_etype27, _size24) = iprot.readListBegin()
+          for _i28 in xrange(_size24):
+            _elem29 = iprot.readString();
+            self.preJobCommands.append(_elem29)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 12:
+        if ftype == TType.LIST:
+          self.postJobCommands = []
+          (_etype33, _size30) = iprot.readListBegin()
+          for _i34 in xrange(_size30):
+            _elem35 = iprot.readString();
+            self.postJobCommands.append(_elem35)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ApplicationDeploymentDescription')
+    if self.appDeploymentId is not None:
+      oprot.writeFieldBegin('appDeploymentId', TType.STRING, 1)
+      oprot.writeString(self.appDeploymentId)
+      oprot.writeFieldEnd()
+    if self.appModuleId is not None:
+      oprot.writeFieldBegin('appModuleId', TType.STRING, 2)
+      oprot.writeString(self.appModuleId)
+      oprot.writeFieldEnd()
+    if self.computeHostId is not None:
+      oprot.writeFieldBegin('computeHostId', TType.STRING, 3)
+      oprot.writeString(self.computeHostId)
+      oprot.writeFieldEnd()
+    if self.executablePath is not None:
+      oprot.writeFieldBegin('executablePath', TType.STRING, 4)
+      oprot.writeString(self.executablePath)
+      oprot.writeFieldEnd()
+    if self.parallelism is not None:
+      oprot.writeFieldBegin('parallelism', TType.I32, 5)
+      oprot.writeI32(self.parallelism)
+      oprot.writeFieldEnd()
+    if self.appDeploymentDescription is not None:
+      oprot.writeFieldBegin('appDeploymentDescription', TType.STRING, 6)
+      oprot.writeString(self.appDeploymentDescription)
+      oprot.writeFieldEnd()
+    if self.moduleLoadCmds is not None:
+      oprot.writeFieldBegin('moduleLoadCmds', TType.LIST, 7)
+      oprot.writeListBegin(TType.STRING, len(self.moduleLoadCmds))
+      for iter36 in self.moduleLoadCmds:
+        oprot.writeString(iter36)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.libPrependPaths is not None:
+      oprot.writeFieldBegin('libPrependPaths', TType.LIST, 8)
+      oprot.writeListBegin(TType.STRUCT, len(self.libPrependPaths))
+      for iter37 in self.libPrependPaths:
+        iter37.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.libAppendPaths is not None:
+      oprot.writeFieldBegin('libAppendPaths', TType.LIST, 9)
+      oprot.writeListBegin(TType.STRUCT, len(self.libAppendPaths))
+      for iter38 in self.libAppendPaths:
+        iter38.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.setEnvironment is not None:
+      oprot.writeFieldBegin('setEnvironment', TType.LIST, 10)
+      oprot.writeListBegin(TType.STRUCT, len(self.setEnvironment))
+      for iter39 in self.setEnvironment:
+        iter39.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.preJobCommands is not None:
+      oprot.writeFieldBegin('preJobCommands', TType.LIST, 11)
+      oprot.writeListBegin(TType.STRING, len(self.preJobCommands))
+      for iter40 in self.preJobCommands:
+        oprot.writeString(iter40)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.postJobCommands is not None:
+      oprot.writeFieldBegin('postJobCommands', TType.LIST, 12)
+      oprot.writeListBegin(TType.STRING, len(self.postJobCommands))
+      for iter41 in self.postJobCommands:
+        oprot.writeString(iter41)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.appDeploymentId is None:
+      raise TProtocol.TProtocolException(message='Required field appDeploymentId is unset!')
+    if self.appModuleId is None:
+      raise TProtocol.TProtocolException(message='Required field appModuleId is unset!')
+    if self.computeHostId is None:
+      raise TProtocol.TProtocolException(message='Required field computeHostId is unset!')
+    if self.executablePath is None:
+      raise TProtocol.TProtocolException(message='Required field executablePath is unset!')
+    if self.parallelism is None:
+      raise TProtocol.TProtocolException(message='Required field parallelism is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
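
Editor's note: as a rough illustration of how the appdeployment beans above fit together, the sketch below
builds a deployment description the way a gateway client might before registering it. The module and host
identifiers are placeholder values and the commented-out registration call is an assumption; only the classes
themselves come from this patch. Note that appDeploymentId defaults to "DO_NOT_SET_AT_CLIENTS" and is set by
the registry on registration.

    # Illustrative only: constructing a deployment description from the
    # generated Python beans; identifiers below are placeholders.
    from apache.airavata.model.appcatalog.appdeployment.ttypes import (
        ApplicationDeploymentDescription, ApplicationParallelismType, SetEnvPaths)

    deployment = ApplicationDeploymentDescription(
        appModuleId="amber_sander_module_id",        # id returned when the module was registered
        computeHostId="stampede_resource_id",        # previously registered compute resource
        executablePath="/opt/amber/bin/sander.MPI",
        parallelism=ApplicationParallelismType.MPI,
        moduleLoadCmds=["module load amber"],
        setEnvironment=[SetEnvPaths(name="AMBERHOME", value="/opt/amber")])

    deployment.validate()  # raises TProtocolException if a required field is unset
    # airavata_client.registerApplicationDeployment(token, gateway_id, deployment)  # hypothetical call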

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/constants.py
new file mode 100644
index 0000000..249ecf4
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/constants.py
@@ -0,0 +1,12 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+
+DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/ttypes.py
new file mode 100644
index 0000000..942ff8a
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/appinterface/ttypes.py
@@ -0,0 +1,600 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+class DataType:
+  """
+  Data Types supported in Airavata. These are the primitive data types.
+
+  """
+  STRING = 0
+  INTEGER = 1
+  FLOAT = 2
+  URI = 3
+  STDOUT = 4
+  STDERR = 5
+
+  _VALUES_TO_NAMES = {
+    0: "STRING",
+    1: "INTEGER",
+    2: "FLOAT",
+    3: "URI",
+    4: "STDOUT",
+    5: "STDERR",
+  }
+
+  _NAMES_TO_VALUES = {
+    "STRING": 0,
+    "INTEGER": 1,
+    "FLOAT": 2,
+    "URI": 3,
+    "STDOUT": 4,
+    "STDERR": 5,
+  }
+
+
+class InputDataObjectType:
+  """
+  Application Inputs. The parameters describe how inputs are passed to the application.
+
+  name:
+    Name of the parameter.
+
+  value:
+    Value of the parameter. A default value could be set during registration.
+
+  type:
+    Data type of the parameter
+
+  applicationArgument:
+    The argument flag sent to the application, such as -p pressure.
+
+  standardInput:
+    When this value is set, the parameter is sent as standard input rather than a parameter.
+    Typically this is passed using the redirection operator "<".
+
+  userFriendlyDescription:
+    Description to be displayed at the user interface.
+
+  metaData:
+    Any metadata. This is typically ignored by Airavata and is used by gateways for application configuration.
+
+
+  Attributes:
+   - name
+   - value
+   - type
+   - applicationArgument
+   - standardInput
+   - userFriendlyDescription
+   - metaData
+   - inputOrder
+   - isRequired
+   - requiredToAddedToCommandLine
+   - dataStaged
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'name', None, None, ), # 1
+    (2, TType.STRING, 'value', None, None, ), # 2
+    (3, TType.I32, 'type', None, None, ), # 3
+    (4, TType.STRING, 'applicationArgument', None, None, ), # 4
+    (5, TType.BOOL, 'standardInput', None, False, ), # 5
+    (6, TType.STRING, 'userFriendlyDescription', None, None, ), # 6
+    (7, TType.STRING, 'metaData', None, None, ), # 7
+    (8, TType.I32, 'inputOrder', None, None, ), # 8
+    (9, TType.BOOL, 'isRequired', None, None, ), # 9
+    (10, TType.BOOL, 'requiredToAddedToCommandLine', None, None, ), # 10
+    (11, TType.BOOL, 'dataStaged', None, False, ), # 11
+  )
+
+  def __init__(self, name=None, value=None, type=None, applicationArgument=None, standardInput=thrift_spec[5][4], userFriendlyDescription=None, metaData=None, inputOrder=None, isRequired=None, requiredToAddedToCommandLine=None, dataStaged=thrift_spec[11][4],):
+    self.name = name
+    self.value = value
+    self.type = type
+    self.applicationArgument = applicationArgument
+    self.standardInput = standardInput
+    self.userFriendlyDescription = userFriendlyDescription
+    self.metaData = metaData
+    self.inputOrder = inputOrder
+    self.isRequired = isRequired
+    self.requiredToAddedToCommandLine = requiredToAddedToCommandLine
+    self.dataStaged = dataStaged
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.name = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.value = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.type = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.applicationArgument = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.BOOL:
+          self.standardInput = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.STRING:
+          self.userFriendlyDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.STRING:
+          self.metaData = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.I32:
+          self.inputOrder = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.BOOL:
+          self.isRequired = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.BOOL:
+          self.requiredToAddedToCommandLine = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 11:
+        if ftype == TType.BOOL:
+          self.dataStaged = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('InputDataObjectType')
+    if self.name is not None:
+      oprot.writeFieldBegin('name', TType.STRING, 1)
+      oprot.writeString(self.name)
+      oprot.writeFieldEnd()
+    if self.value is not None:
+      oprot.writeFieldBegin('value', TType.STRING, 2)
+      oprot.writeString(self.value)
+      oprot.writeFieldEnd()
+    if self.type is not None:
+      oprot.writeFieldBegin('type', TType.I32, 3)
+      oprot.writeI32(self.type)
+      oprot.writeFieldEnd()
+    if self.applicationArgument is not None:
+      oprot.writeFieldBegin('applicationArgument', TType.STRING, 4)
+      oprot.writeString(self.applicationArgument)
+      oprot.writeFieldEnd()
+    if self.standardInput is not None:
+      oprot.writeFieldBegin('standardInput', TType.BOOL, 5)
+      oprot.writeBool(self.standardInput)
+      oprot.writeFieldEnd()
+    if self.userFriendlyDescription is not None:
+      oprot.writeFieldBegin('userFriendlyDescription', TType.STRING, 6)
+      oprot.writeString(self.userFriendlyDescription)
+      oprot.writeFieldEnd()
+    if self.metaData is not None:
+      oprot.writeFieldBegin('metaData', TType.STRING, 7)
+      oprot.writeString(self.metaData)
+      oprot.writeFieldEnd()
+    if self.inputOrder is not None:
+      oprot.writeFieldBegin('inputOrder', TType.I32, 8)
+      oprot.writeI32(self.inputOrder)
+      oprot.writeFieldEnd()
+    if self.isRequired is not None:
+      oprot.writeFieldBegin('isRequired', TType.BOOL, 9)
+      oprot.writeBool(self.isRequired)
+      oprot.writeFieldEnd()
+    if self.requiredToAddedToCommandLine is not None:
+      oprot.writeFieldBegin('requiredToAddedToCommandLine', TType.BOOL, 10)
+      oprot.writeBool(self.requiredToAddedToCommandLine)
+      oprot.writeFieldEnd()
+    if self.dataStaged is not None:
+      oprot.writeFieldBegin('dataStaged', TType.BOOL, 11)
+      oprot.writeBool(self.dataStaged)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.name is None:
+      raise TProtocol.TProtocolException(message='Required field name is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class OutputDataObjectType:
+  """
+  Application Outputs. The parameters describe how outputs are generated by the application.
+
+  name:
+    Name of the parameter.
+
+  value:
+    Value of the parameter.
+
+  type:
+    Data type of the parameter
+
+  applicationArgument:
+    The argument flag sent to the application. Such as -p pressure.
+
+  standardInput:
+    When this value is set, the parameter is sent as standard input rather than a parameter.
+    Typically this is passed using redirection operator ">".
+
+  userFriendlyDescription:
+    Description to be displayed at the user interface.
+
+  metaData:
+    Any metadata. This is typically ignored by Airavata and is used by gateways for application configuration.
+
+
+  Attributes:
+   - name
+   - value
+   - type
+   - applicationArgument
+   - isRequired
+   - requiredToAddedToCommandLine
+   - dataMovement
+   - location
+   - searchQuery
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'name', None, None, ), # 1
+    (2, TType.STRING, 'value', None, None, ), # 2
+    (3, TType.I32, 'type', None, None, ), # 3
+    (4, TType.STRING, 'applicationArgument', None, None, ), # 4
+    (5, TType.BOOL, 'isRequired', None, None, ), # 5
+    (6, TType.BOOL, 'requiredToAddedToCommandLine', None, None, ), # 6
+    (7, TType.BOOL, 'dataMovement', None, None, ), # 7
+    (8, TType.STRING, 'location', None, None, ), # 8
+    (9, TType.STRING, 'searchQuery', None, None, ), # 9
+  )
+
+  def __init__(self, name=None, value=None, type=None, applicationArgument=None, isRequired=None, requiredToAddedToCommandLine=None, dataMovement=None, location=None, searchQuery=None,):
+    self.name = name
+    self.value = value
+    self.type = type
+    self.applicationArgument = applicationArgument
+    self.isRequired = isRequired
+    self.requiredToAddedToCommandLine = requiredToAddedToCommandLine
+    self.dataMovement = dataMovement
+    self.location = location
+    self.searchQuery = searchQuery
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.name = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.value = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.type = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.applicationArgument = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.BOOL:
+          self.isRequired = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.BOOL:
+          self.requiredToAddedToCommandLine = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.BOOL:
+          self.dataMovement = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.STRING:
+          self.location = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.STRING:
+          self.searchQuery = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('OutputDataObjectType')
+    if self.name is not None:
+      oprot.writeFieldBegin('name', TType.STRING, 1)
+      oprot.writeString(self.name)
+      oprot.writeFieldEnd()
+    if self.value is not None:
+      oprot.writeFieldBegin('value', TType.STRING, 2)
+      oprot.writeString(self.value)
+      oprot.writeFieldEnd()
+    if self.type is not None:
+      oprot.writeFieldBegin('type', TType.I32, 3)
+      oprot.writeI32(self.type)
+      oprot.writeFieldEnd()
+    if self.applicationArgument is not None:
+      oprot.writeFieldBegin('applicationArgument', TType.STRING, 4)
+      oprot.writeString(self.applicationArgument)
+      oprot.writeFieldEnd()
+    if self.isRequired is not None:
+      oprot.writeFieldBegin('isRequired', TType.BOOL, 5)
+      oprot.writeBool(self.isRequired)
+      oprot.writeFieldEnd()
+    if self.requiredToAddedToCommandLine is not None:
+      oprot.writeFieldBegin('requiredToAddedToCommandLine', TType.BOOL, 6)
+      oprot.writeBool(self.requiredToAddedToCommandLine)
+      oprot.writeFieldEnd()
+    if self.dataMovement is not None:
+      oprot.writeFieldBegin('dataMovement', TType.BOOL, 7)
+      oprot.writeBool(self.dataMovement)
+      oprot.writeFieldEnd()
+    if self.location is not None:
+      oprot.writeFieldBegin('location', TType.STRING, 8)
+      oprot.writeString(self.location)
+      oprot.writeFieldEnd()
+    if self.searchQuery is not None:
+      oprot.writeFieldBegin('searchQuery', TType.STRING, 9)
+      oprot.writeString(self.searchQuery)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.name is None:
+      raise TProtocol.TProtocolException(message='Required field name is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ApplicationInterfaceDescription:
+  """
+  Application Interface Description
+
+  applicationModules:
+    Associate all application modules and versions to which this interface applies.
+
+  applicationInputs:
+    Inputs to be passed to the application
+
+  applicationOutputs:
+    Outputs generated from the application
+
+
+  Attributes:
+   - applicationInterfaceId
+   - applicationName
+   - applicationDescription
+   - applicationModules
+   - applicationInputs
+   - applicationOutputs
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'applicationInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.STRING, 'applicationName', None, None, ), # 2
+    (3, TType.STRING, 'applicationDescription', None, None, ), # 3
+    (4, TType.LIST, 'applicationModules', (TType.STRING,None), None, ), # 4
+    (5, TType.LIST, 'applicationInputs', (TType.STRUCT,(InputDataObjectType, InputDataObjectType.thrift_spec)), None, ), # 5
+    (6, TType.LIST, 'applicationOutputs', (TType.STRUCT,(OutputDataObjectType, OutputDataObjectType.thrift_spec)), None, ), # 6
+  )
+
+  def __init__(self, applicationInterfaceId=thrift_spec[1][4], applicationName=None, applicationDescription=None, applicationModules=None, applicationInputs=None, applicationOutputs=None,):
+    self.applicationInterfaceId = applicationInterfaceId
+    self.applicationName = applicationName
+    self.applicationDescription = applicationDescription
+    self.applicationModules = applicationModules
+    self.applicationInputs = applicationInputs
+    self.applicationOutputs = applicationOutputs
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.applicationInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.applicationName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.applicationDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.LIST:
+          self.applicationModules = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = iprot.readString();
+            self.applicationModules.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.LIST:
+          self.applicationInputs = []
+          (_etype9, _size6) = iprot.readListBegin()
+          for _i10 in xrange(_size6):
+            _elem11 = InputDataObjectType()
+            _elem11.read(iprot)
+            self.applicationInputs.append(_elem11)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.LIST:
+          self.applicationOutputs = []
+          (_etype15, _size12) = iprot.readListBegin()
+          for _i16 in xrange(_size12):
+            _elem17 = OutputDataObjectType()
+            _elem17.read(iprot)
+            self.applicationOutputs.append(_elem17)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ApplicationInterfaceDescription')
+    if self.applicationInterfaceId is not None:
+      oprot.writeFieldBegin('applicationInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.applicationInterfaceId)
+      oprot.writeFieldEnd()
+    if self.applicationName is not None:
+      oprot.writeFieldBegin('applicationName', TType.STRING, 2)
+      oprot.writeString(self.applicationName)
+      oprot.writeFieldEnd()
+    if self.applicationDescription is not None:
+      oprot.writeFieldBegin('applicationDescription', TType.STRING, 3)
+      oprot.writeString(self.applicationDescription)
+      oprot.writeFieldEnd()
+    if self.applicationModules is not None:
+      oprot.writeFieldBegin('applicationModules', TType.LIST, 4)
+      oprot.writeListBegin(TType.STRING, len(self.applicationModules))
+      for iter18 in self.applicationModules:
+        oprot.writeString(iter18)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.applicationInputs is not None:
+      oprot.writeFieldBegin('applicationInputs', TType.LIST, 5)
+      oprot.writeListBegin(TType.STRUCT, len(self.applicationInputs))
+      for iter19 in self.applicationInputs:
+        iter19.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.applicationOutputs is not None:
+      oprot.writeFieldBegin('applicationOutputs', TType.LIST, 6)
+      oprot.writeListBegin(TType.STRUCT, len(self.applicationOutputs))
+      for iter20 in self.applicationOutputs:
+        iter20.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.applicationInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field applicationInterfaceId is unset!')
+    if self.applicationName is None:
+      raise TProtocol.TProtocolException(message='Required field applicationName is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
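
The generated beans above follow the standard Thrift Python pattern: keyword constructors, validate() for required fields, and read()/write() against a protocol. A minimal usage sketch follows; the import path is assumed to mirror the SDK's directory layout, and the application and parameter names are purely illustrative:

    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    # Assumed import path; adjust to wherever the generated appinterface ttypes live.
    from apache.airavata.model.appcatalog.appinterface.ttypes import (
        InputDataObjectType, OutputDataObjectType, ApplicationInterfaceDescription)

    # Hypothetical interface; names and arguments are illustrative only.
    pressure = InputDataObjectType(name='pressure', applicationArgument='-p',
                                   isRequired=True, inputOrder=1)
    result = OutputDataObjectType(name='result')
    interface = ApplicationInterfaceDescription(applicationName='MyApp',
                                                applicationInputs=[pressure],
                                                applicationOutputs=[result])
    interface.validate()  # applicationInterfaceId keeps its DO_NOT_SET_AT_CLIENTS default

    # Round-trip through the binary protocol using the generated read()/write().
    buf = TTransport.TMemoryBuffer()
    interface.write(TBinaryProtocol.TBinaryProtocol(buf))
    decoded = ApplicationInterfaceDescription()
    decoded.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
    assert decoded == interface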

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/constants.py
new file mode 100644
index 0000000..249ecf4
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/constants.py
@@ -0,0 +1,12 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+
+DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"


[5/8] airavata git commit: Adding python generated code - AIRAVATA-1642

Posted by sm...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/ttypes.py
new file mode 100644
index 0000000..7f70c25
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/computeresource/ttypes.py
@@ -0,0 +1,1967 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+class ResourceJobManagerType:
+  """
+  * Enumeration of local resource job manager types supported by Airavata
+  *
+  * FORK:
+  *  Forking of commands without any job manager
+  *
+  * PBS:
+  *  Job manager supporting the Portable Batch System (PBS) protocol. Some examples include TORQUE, PBSPro, Grid Engine.
+  *
+  * SLURM:
+  *  The Simple Linux Utility for Resource Management (SLURM) is an open source workload manager.
+  *
+  * UGE:
+  *  Univa Grid Engine, a variation of the PBS implementation.
+  *
+  * LSF:
+  *  IBM Platform Load Sharing Facility, predominantly installed on IBM clusters.
+  *
+  """
+  FORK = 0
+  PBS = 1
+  SLURM = 2
+  LSF = 3
+  UGE = 4
+
+  _VALUES_TO_NAMES = {
+    0: "FORK",
+    1: "PBS",
+    2: "SLURM",
+    3: "LSF",
+    4: "UGE",
+  }
+
+  _NAMES_TO_VALUES = {
+    "FORK": 0,
+    "PBS": 1,
+    "SLURM": 2,
+    "LSF": 3,
+    "UGE": 4,
+  }
+
+class JobManagerCommand:
+  """
+  Enumeration of resource job manager commands
+
+  SUBMISSION:
+   Ex: qsub, sbatch
+
+  JOB_MONITORING:
+   Ex: qstat, squeue
+
+  DELETION:
+   Ex: qdel, scancel
+
+  CHECK_JOB:
+   Detailed Status about the Job. Ex: checkjob
+
+  SHOW_QUEUE:
+   List of jobs queued by the scheduler. Ex: showq
+
+  SHOW_RESERVATION:
+   List all reservations. Ex: showres, show_res
+
+  SHOW_START:
+   Display the start time of the specified job. Ex: showstart
+
+  """
+  SUBMISSION = 0
+  JOB_MONITORING = 1
+  DELETION = 2
+  CHECK_JOB = 3
+  SHOW_QUEUE = 4
+  SHOW_RESERVATION = 5
+  SHOW_START = 6
+
+  _VALUES_TO_NAMES = {
+    0: "SUBMISSION",
+    1: "JOB_MONITORING",
+    2: "DELETION",
+    3: "CHECK_JOB",
+    4: "SHOW_QUEUE",
+    5: "SHOW_RESERVATION",
+    6: "SHOW_START",
+  }
+
+  _NAMES_TO_VALUES = {
+    "SUBMISSION": 0,
+    "JOB_MONITORING": 1,
+    "DELETION": 2,
+    "CHECK_JOB": 3,
+    "SHOW_QUEUE": 4,
+    "SHOW_RESERVATION": 5,
+    "SHOW_START": 6,
+  }
+
+class FileSystems:
+  """
+  Enumeration of file systems on the resource
+
+  HOME:
+   User home directory.
+
+  WORK:
+   Work or project storage space.
+
+  LOCALTMP:
+   Node-local temporary storage.
+
+  SCRATCH:
+   Scratch space for temporary job data.
+
+  ARCHIVE:
+   Long-term archival storage.
+
+  """
+  HOME = 0
+  WORK = 1
+  LOCALTMP = 2
+  SCRATCH = 3
+  ARCHIVE = 4
+
+  _VALUES_TO_NAMES = {
+    0: "HOME",
+    1: "WORK",
+    2: "LOCALTMP",
+    3: "SCRATCH",
+    4: "ARCHIVE",
+  }
+
+  _NAMES_TO_VALUES = {
+    "HOME": 0,
+    "WORK": 1,
+    "LOCALTMP": 2,
+    "SCRATCH": 3,
+    "ARCHIVE": 4,
+  }
+
+class SecurityProtocol:
+  """
+  Enumeration of security authentication and authorization mechanisms supported by Airavata. This enumeration just
+   describes the supported mechanism. The corresponding security credentials are registered with Airavata Credential
+   store.
+
+  USERNAME_PASSWORD:
+   Username and password based authentication.
+
+  SSH_KEYS:
+   SSH key based authentication.
+
+  FIXME: Change GSI to a more precise generic security protocol - X509
+
+  """
+  USERNAME_PASSWORD = 0
+  SSH_KEYS = 1
+  GSI = 2
+  KERBEROS = 3
+  OAUTH = 4
+
+  _VALUES_TO_NAMES = {
+    0: "USERNAME_PASSWORD",
+    1: "SSH_KEYS",
+    2: "GSI",
+    3: "KERBEROS",
+    4: "OAUTH",
+  }
+
+  _NAMES_TO_VALUES = {
+    "USERNAME_PASSWORD": 0,
+    "SSH_KEYS": 1,
+    "GSI": 2,
+    "KERBEROS": 3,
+    "OAUTH": 4,
+  }
+
+class JobSubmissionProtocol:
+  """
+  Enumeration of Airavata supported Job Submission Mechanisms for High Performance Computing Clusters.
+
+  SSH:
+   Execute remote job submission commands via the secure shell (SSH) protocol.
+
+  GLOBUS:
+   Execute remote jobs via the Globus GRAM service.
+
+  UNICORE:
+   Execute remote jobs via Unicore services.
+
+  """
+  LOCAL = 0
+  SSH = 1
+  GLOBUS = 2
+  UNICORE = 3
+  CLOUD = 4
+
+  _VALUES_TO_NAMES = {
+    0: "LOCAL",
+    1: "SSH",
+    2: "GLOBUS",
+    3: "UNICORE",
+    4: "CLOUD",
+  }
+
+  _NAMES_TO_VALUES = {
+    "LOCAL": 0,
+    "SSH": 1,
+    "GLOBUS": 2,
+    "UNICORE": 3,
+    "CLOUD": 4,
+  }
+
+class MonitorMode:
+  """
+  Monitoring modes
+
+  POLL_JOB_MANAGER:
+  GFac needs to poll the job manager for job status changes.
+
+  XSEDE_AMQP_SUBSCRIBE:
+  The server will publish job status changes to an AMQP server.
+
+
+  """
+  POLL_JOB_MANAGER = 0
+  XSEDE_AMQP_SUBSCRIBE = 1
+
+  _VALUES_TO_NAMES = {
+    0: "POLL_JOB_MANAGER",
+    1: "XSEDE_AMQP_SUBSCRIBE",
+  }
+
+  _NAMES_TO_VALUES = {
+    "POLL_JOB_MANAGER": 0,
+    "XSEDE_AMQP_SUBSCRIBE": 1,
+  }
+
+class DataMovementProtocol:
+  """
+  Enumeration of data movement supported by Airavata
+
+  SCP:
+   Secure copy of files over the SSH protocol.
+
+  SFTP:
+   The SSH File Transfer Protocol.
+
+  GridFTP:
+   Globus File Transfer Protocol
+
+  UNICORE_STORAGE_SERVICE:
+   Storage Service Provided by Unicore
+
+  """
+  LOCAL = 0
+  SCP = 1
+  SFTP = 2
+  GridFTP = 3
+  UNICORE_STORAGE_SERVICE = 4
+
+  _VALUES_TO_NAMES = {
+    0: "LOCAL",
+    1: "SCP",
+    2: "SFTP",
+    3: "GridFTP",
+    4: "UNICORE_STORAGE_SERVICE",
+  }
+
+  _NAMES_TO_VALUES = {
+    "LOCAL": 0,
+    "SCP": 1,
+    "SFTP": 2,
+    "GridFTP": 3,
+    "UNICORE_STORAGE_SERVICE": 4,
+  }
+
+class ProviderName:
+  """
+  Provider name
+
+  """
+  EC2 = 0
+  AWSEC2 = 1
+  RACKSPACE = 2
+
+  _VALUES_TO_NAMES = {
+    0: "EC2",
+    1: "AWSEC2",
+    2: "RACKSPACE",
+  }
+
+  _NAMES_TO_VALUES = {
+    "EC2": 0,
+    "AWSEC2": 1,
+    "RACKSPACE": 2,
+  }
+
+
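The enum classes above are plain integer constants with the usual Thrift lookup tables, so translating between wire values and names is a dictionary lookup. A brief sketch (import path assumed as before):

    from apache.airavata.model.appcatalog.computeresource.ttypes import (
        ResourceJobManagerType, JobManagerCommand)

    rjm_type = ResourceJobManagerType.SLURM                   # 2 on the wire
    print(ResourceJobManagerType._VALUES_TO_NAMES[rjm_type])  # "SLURM"
    print(JobManagerCommand._NAMES_TO_VALUES["SUBMISSION"])   # 0
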
+class ResourceJobManager:
+  """
+  Resource Job Manager Information
+
+  resourceJobManagerType:
+   A typical HPC cluster has a single Job Manager to manage the resources.
+
+  pushMonitoringEndpoint:
+   If the job manager pushes out state changes to a database or bus, specify the service endpoint.
+    Ex: Moab Web Service, Moab MongoDB URL, AMQP (GLUE2) Broker
+
+  jobManagerBinPath:
+   Path to the Job Manager Installation Binary directory.
+
+  jobManagerCommands:
+   An enumeration of commonly used manager commands.
+
+
+  Attributes:
+   - resourceJobManagerId
+   - resourceJobManagerType
+   - pushMonitoringEndpoint
+   - jobManagerBinPath
+   - jobManagerCommands
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'resourceJobManagerId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'resourceJobManagerType', None, None, ), # 2
+    (3, TType.STRING, 'pushMonitoringEndpoint', None, None, ), # 3
+    (4, TType.STRING, 'jobManagerBinPath', None, None, ), # 4
+    (5, TType.MAP, 'jobManagerCommands', (TType.I32,None,TType.STRING,None), None, ), # 5
+  )
+
+  def __init__(self, resourceJobManagerId=thrift_spec[1][4], resourceJobManagerType=None, pushMonitoringEndpoint=None, jobManagerBinPath=None, jobManagerCommands=None,):
+    self.resourceJobManagerId = resourceJobManagerId
+    self.resourceJobManagerType = resourceJobManagerType
+    self.pushMonitoringEndpoint = pushMonitoringEndpoint
+    self.jobManagerBinPath = jobManagerBinPath
+    self.jobManagerCommands = jobManagerCommands
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.resourceJobManagerId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.resourceJobManagerType = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.pushMonitoringEndpoint = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.jobManagerBinPath = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.MAP:
+          self.jobManagerCommands = {}
+          (_ktype1, _vtype2, _size0 ) = iprot.readMapBegin()
+          for _i4 in xrange(_size0):
+            _key5 = iprot.readI32();
+            _val6 = iprot.readString();
+            self.jobManagerCommands[_key5] = _val6
+          iprot.readMapEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ResourceJobManager')
+    if self.resourceJobManagerId is not None:
+      oprot.writeFieldBegin('resourceJobManagerId', TType.STRING, 1)
+      oprot.writeString(self.resourceJobManagerId)
+      oprot.writeFieldEnd()
+    if self.resourceJobManagerType is not None:
+      oprot.writeFieldBegin('resourceJobManagerType', TType.I32, 2)
+      oprot.writeI32(self.resourceJobManagerType)
+      oprot.writeFieldEnd()
+    if self.pushMonitoringEndpoint is not None:
+      oprot.writeFieldBegin('pushMonitoringEndpoint', TType.STRING, 3)
+      oprot.writeString(self.pushMonitoringEndpoint)
+      oprot.writeFieldEnd()
+    if self.jobManagerBinPath is not None:
+      oprot.writeFieldBegin('jobManagerBinPath', TType.STRING, 4)
+      oprot.writeString(self.jobManagerBinPath)
+      oprot.writeFieldEnd()
+    if self.jobManagerCommands is not None:
+      oprot.writeFieldBegin('jobManagerCommands', TType.MAP, 5)
+      oprot.writeMapBegin(TType.I32, TType.STRING, len(self.jobManagerCommands))
+      for kiter7,viter8 in self.jobManagerCommands.items():
+        oprot.writeI32(kiter7)
+        oprot.writeString(viter8)
+      oprot.writeMapEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.resourceJobManagerId is None:
+      raise TProtocol.TProtocolException(message='Required field resourceJobManagerId is unset!')
+    if self.resourceJobManagerType is None:
+      raise TProtocol.TProtocolException(message='Required field resourceJobManagerType is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
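jobManagerCommands is a map keyed by the JobManagerCommand enum, which lets a gateway override the executables used for submission, monitoring and deletion. A sketch of a SLURM-style manager; the bin path and command names are illustrative, not defaults shipped with Airavata:

    from apache.airavata.model.appcatalog.computeresource.ttypes import (
        ResourceJobManager, ResourceJobManagerType, JobManagerCommand)

    rjm = ResourceJobManager(
        resourceJobManagerType=ResourceJobManagerType.SLURM,
        jobManagerBinPath='/usr/bin',                  # illustrative path
        jobManagerCommands={
            JobManagerCommand.SUBMISSION: 'sbatch',
            JobManagerCommand.JOB_MONITORING: 'squeue',
            JobManagerCommand.DELETION: 'scancel',
        })
    rjm.validate()  # id keeps the DO_NOT_SET_AT_CLIENTS default; the type is required
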
+class BatchQueue:
+  """
+  Batch Queue Information on Supercomputers
+
+  maxRunTime:
+   Maximum allowed run time in hours.
+
+  Attributes:
+   - queueName
+   - queueDescription
+   - maxRunTime
+   - maxNodes
+   - maxProcessors
+   - maxJobsInQueue
+   - maxMemory
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'queueName', None, None, ), # 1
+    (2, TType.STRING, 'queueDescription', None, None, ), # 2
+    (3, TType.I32, 'maxRunTime', None, None, ), # 3
+    (4, TType.I32, 'maxNodes', None, None, ), # 4
+    (5, TType.I32, 'maxProcessors', None, None, ), # 5
+    (6, TType.I32, 'maxJobsInQueue', None, None, ), # 6
+    (7, TType.I32, 'maxMemory', None, None, ), # 7
+  )
+
+  def __init__(self, queueName=None, queueDescription=None, maxRunTime=None, maxNodes=None, maxProcessors=None, maxJobsInQueue=None, maxMemory=None,):
+    self.queueName = queueName
+    self.queueDescription = queueDescription
+    self.maxRunTime = maxRunTime
+    self.maxNodes = maxNodes
+    self.maxProcessors = maxProcessors
+    self.maxJobsInQueue = maxJobsInQueue
+    self.maxMemory = maxMemory
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.queueName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.queueDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.maxRunTime = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I32:
+          self.maxNodes = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I32:
+          self.maxProcessors = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.I32:
+          self.maxJobsInQueue = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.I32:
+          self.maxMemory = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('BatchQueue')
+    if self.queueName is not None:
+      oprot.writeFieldBegin('queueName', TType.STRING, 1)
+      oprot.writeString(self.queueName)
+      oprot.writeFieldEnd()
+    if self.queueDescription is not None:
+      oprot.writeFieldBegin('queueDescription', TType.STRING, 2)
+      oprot.writeString(self.queueDescription)
+      oprot.writeFieldEnd()
+    if self.maxRunTime is not None:
+      oprot.writeFieldBegin('maxRunTime', TType.I32, 3)
+      oprot.writeI32(self.maxRunTime)
+      oprot.writeFieldEnd()
+    if self.maxNodes is not None:
+      oprot.writeFieldBegin('maxNodes', TType.I32, 4)
+      oprot.writeI32(self.maxNodes)
+      oprot.writeFieldEnd()
+    if self.maxProcessors is not None:
+      oprot.writeFieldBegin('maxProcessors', TType.I32, 5)
+      oprot.writeI32(self.maxProcessors)
+      oprot.writeFieldEnd()
+    if self.maxJobsInQueue is not None:
+      oprot.writeFieldBegin('maxJobsInQueue', TType.I32, 6)
+      oprot.writeI32(self.maxJobsInQueue)
+      oprot.writeFieldEnd()
+    if self.maxMemory is not None:
+      oprot.writeFieldBegin('maxMemory', TType.I32, 7)
+      oprot.writeI32(self.maxMemory)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.queueName is None:
+      raise TProtocol.TProtocolException(message='Required field queueName is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class SCPDataMovement:
+  """
+  Data Movement through Secure Copy (SCP)
+
+  alternativeSCPHostName:
+   If the scp login host is different from the hostname itself, specify it here.
+
+  sshPort:
+   If a non-default port needs to be used, specify it.
+
+  Attributes:
+   - dataMovementInterfaceId
+   - securityProtocol
+   - alternativeSCPHostName
+   - sshPort
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'dataMovementInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'securityProtocol', None, None, ), # 2
+    (3, TType.STRING, 'alternativeSCPHostName', None, None, ), # 3
+    (4, TType.I32, 'sshPort', None, 22, ), # 4
+  )
+
+  def __init__(self, dataMovementInterfaceId=thrift_spec[1][4], securityProtocol=None, alternativeSCPHostName=None, sshPort=thrift_spec[4][4],):
+    self.dataMovementInterfaceId = dataMovementInterfaceId
+    self.securityProtocol = securityProtocol
+    self.alternativeSCPHostName = alternativeSCPHostName
+    self.sshPort = sshPort
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.dataMovementInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.securityProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.alternativeSCPHostName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I32:
+          self.sshPort = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('SCPDataMovement')
+    if self.dataMovementInterfaceId is not None:
+      oprot.writeFieldBegin('dataMovementInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.dataMovementInterfaceId)
+      oprot.writeFieldEnd()
+    if self.securityProtocol is not None:
+      oprot.writeFieldBegin('securityProtocol', TType.I32, 2)
+      oprot.writeI32(self.securityProtocol)
+      oprot.writeFieldEnd()
+    if self.alternativeSCPHostName is not None:
+      oprot.writeFieldBegin('alternativeSCPHostName', TType.STRING, 3)
+      oprot.writeString(self.alternativeSCPHostName)
+      oprot.writeFieldEnd()
+    if self.sshPort is not None:
+      oprot.writeFieldBegin('sshPort', TType.I32, 4)
+      oprot.writeI32(self.sshPort)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.dataMovementInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field dataMovementInterfaceId is unset!')
+    if self.securityProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field securityProtocol is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
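Only the security protocol is required beyond the defaulted interface id; sshPort falls back to 22 from the thrift_spec. A sketch (the host name is illustrative):

    from apache.airavata.model.appcatalog.computeresource.ttypes import (
        SCPDataMovement, SecurityProtocol)

    scp = SCPDataMovement(securityProtocol=SecurityProtocol.SSH_KEYS,
                          alternativeSCPHostName='data.example.org')  # illustrative host
    scp.validate()
    print(scp.sshPort)  # 22, taken from the thrift_spec default
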
+class GridFTPDataMovement:
+  """
+  Data Movement through GridFTP
+
+  gridFTPEndPoints:
+   GridFTP endpoint URLs exposed by the resource.
+
+  Attributes:
+   - dataMovementInterfaceId
+   - securityProtocol
+   - gridFTPEndPoints
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'dataMovementInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'securityProtocol', None, None, ), # 2
+    (3, TType.LIST, 'gridFTPEndPoints', (TType.STRING,None), None, ), # 3
+  )
+
+  def __init__(self, dataMovementInterfaceId=thrift_spec[1][4], securityProtocol=None, gridFTPEndPoints=None,):
+    self.dataMovementInterfaceId = dataMovementInterfaceId
+    self.securityProtocol = securityProtocol
+    self.gridFTPEndPoints = gridFTPEndPoints
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.dataMovementInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.securityProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.LIST:
+          self.gridFTPEndPoints = []
+          (_etype12, _size9) = iprot.readListBegin()
+          for _i13 in xrange(_size9):
+            _elem14 = iprot.readString();
+            self.gridFTPEndPoints.append(_elem14)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('GridFTPDataMovement')
+    if self.dataMovementInterfaceId is not None:
+      oprot.writeFieldBegin('dataMovementInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.dataMovementInterfaceId)
+      oprot.writeFieldEnd()
+    if self.securityProtocol is not None:
+      oprot.writeFieldBegin('securityProtocol', TType.I32, 2)
+      oprot.writeI32(self.securityProtocol)
+      oprot.writeFieldEnd()
+    if self.gridFTPEndPoints is not None:
+      oprot.writeFieldBegin('gridFTPEndPoints', TType.LIST, 3)
+      oprot.writeListBegin(TType.STRING, len(self.gridFTPEndPoints))
+      for iter15 in self.gridFTPEndPoints:
+        oprot.writeString(iter15)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.dataMovementInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field dataMovementInterfaceId is unset!')
+    if self.securityProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field securityProtocol is unset!')
+    if self.gridFTPEndPoints is None:
+      raise TProtocol.TProtocolException(message='Required field gridFTPEndPoints is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class UnicoreDataMovement:
+  """
+  Data Movement through UnicoreStorage
+
+  unicoreEndPointURL:
+   unicoreGateway End Point. The provider will query this service to fetch required service end points.
+
+  Attributes:
+   - dataMovementInterfaceId
+   - securityProtocol
+   - unicoreEndPointURL
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'dataMovementInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'securityProtocol', None, None, ), # 2
+    (3, TType.STRING, 'unicoreEndPointURL', None, None, ), # 3
+  )
+
+  def __init__(self, dataMovementInterfaceId=thrift_spec[1][4], securityProtocol=None, unicoreEndPointURL=None,):
+    self.dataMovementInterfaceId = dataMovementInterfaceId
+    self.securityProtocol = securityProtocol
+    self.unicoreEndPointURL = unicoreEndPointURL
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.dataMovementInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.securityProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.unicoreEndPointURL = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('UnicoreDataMovement')
+    if self.dataMovementInterfaceId is not None:
+      oprot.writeFieldBegin('dataMovementInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.dataMovementInterfaceId)
+      oprot.writeFieldEnd()
+    if self.securityProtocol is not None:
+      oprot.writeFieldBegin('securityProtocol', TType.I32, 2)
+      oprot.writeI32(self.securityProtocol)
+      oprot.writeFieldEnd()
+    if self.unicoreEndPointURL is not None:
+      oprot.writeFieldBegin('unicoreEndPointURL', TType.STRING, 3)
+      oprot.writeString(self.unicoreEndPointURL)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.dataMovementInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field dataMovementInterfaceId is unset!')
+    if self.securityProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field securityProtocol is unset!')
+    if self.unicoreEndPointURL is None:
+      raise TProtocol.TProtocolException(message='Required field unicoreEndPointURL is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class LOCALSubmission:
+  """
+  Locally Fork Jobs as OS processes
+
+  resourceJobManager:
+   The resource job manager (typically FORK) used to launch the local processes.
+
+  Attributes:
+   - jobSubmissionInterfaceId
+   - resourceJobManager
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobSubmissionInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.STRUCT, 'resourceJobManager', (ResourceJobManager, ResourceJobManager.thrift_spec), None, ), # 2
+  )
+
+  def __init__(self, jobSubmissionInterfaceId=thrift_spec[1][4], resourceJobManager=None,):
+    self.jobSubmissionInterfaceId = jobSubmissionInterfaceId
+    self.resourceJobManager = resourceJobManager
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobSubmissionInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRUCT:
+          self.resourceJobManager = ResourceJobManager()
+          self.resourceJobManager.read(iprot)
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('LOCALSubmission')
+    if self.jobSubmissionInterfaceId is not None:
+      oprot.writeFieldBegin('jobSubmissionInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.jobSubmissionInterfaceId)
+      oprot.writeFieldEnd()
+    if self.resourceJobManager is not None:
+      oprot.writeFieldBegin('resourceJobManager', TType.STRUCT, 2)
+      self.resourceJobManager.write(oprot)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobSubmissionInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field jobSubmissionInterfaceId is unset!')
+    if self.resourceJobManager is None:
+      raise TProtocol.TProtocolException(message='Required field resourceJobManager is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class LOCALDataMovement:
+  """
+  Local data movement; data is staged on the resource's local file system,
+   so no additional connection parameters are required.
+
+  Attributes:
+   - dataMovementInterfaceId
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'dataMovementInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+  )
+
+  def __init__(self, dataMovementInterfaceId=thrift_spec[1][4],):
+    self.dataMovementInterfaceId = dataMovementInterfaceId
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.dataMovementInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('LOCALDataMovement')
+    if self.dataMovementInterfaceId is not None:
+      oprot.writeFieldBegin('dataMovementInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.dataMovementInterfaceId)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.dataMovementInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field dataMovementInterfaceId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class SSHJobSubmission:
+  """
+  Authenticate using Secure Shell (SSH)
+
+  alternativeSSHHostName:
+   If the ssh login host is different from the hostname itself, specify it here.
+
+  sshPort:
+   If a non-default port needs to be used, specify it.
+
+  Attributes:
+   - jobSubmissionInterfaceId
+   - securityProtocol
+   - resourceJobManager
+   - alternativeSSHHostName
+   - sshPort
+   - monitorMode
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobSubmissionInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'securityProtocol', None, None, ), # 2
+    (3, TType.STRUCT, 'resourceJobManager', (ResourceJobManager, ResourceJobManager.thrift_spec), None, ), # 3
+    (4, TType.STRING, 'alternativeSSHHostName', None, None, ), # 4
+    (5, TType.I32, 'sshPort', None, 22, ), # 5
+    (6, TType.I32, 'monitorMode', None, None, ), # 6
+  )
+
+  def __init__(self, jobSubmissionInterfaceId=thrift_spec[1][4], securityProtocol=None, resourceJobManager=None, alternativeSSHHostName=None, sshPort=thrift_spec[5][4], monitorMode=None,):
+    self.jobSubmissionInterfaceId = jobSubmissionInterfaceId
+    self.securityProtocol = securityProtocol
+    self.resourceJobManager = resourceJobManager
+    self.alternativeSSHHostName = alternativeSSHHostName
+    self.sshPort = sshPort
+    self.monitorMode = monitorMode
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobSubmissionInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.securityProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRUCT:
+          self.resourceJobManager = ResourceJobManager()
+          self.resourceJobManager.read(iprot)
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.alternativeSSHHostName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I32:
+          self.sshPort = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.I32:
+          self.monitorMode = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('SSHJobSubmission')
+    if self.jobSubmissionInterfaceId is not None:
+      oprot.writeFieldBegin('jobSubmissionInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.jobSubmissionInterfaceId)
+      oprot.writeFieldEnd()
+    if self.securityProtocol is not None:
+      oprot.writeFieldBegin('securityProtocol', TType.I32, 2)
+      oprot.writeI32(self.securityProtocol)
+      oprot.writeFieldEnd()
+    if self.resourceJobManager is not None:
+      oprot.writeFieldBegin('resourceJobManager', TType.STRUCT, 3)
+      self.resourceJobManager.write(oprot)
+      oprot.writeFieldEnd()
+    if self.alternativeSSHHostName is not None:
+      oprot.writeFieldBegin('alternativeSSHHostName', TType.STRING, 4)
+      oprot.writeString(self.alternativeSSHHostName)
+      oprot.writeFieldEnd()
+    if self.sshPort is not None:
+      oprot.writeFieldBegin('sshPort', TType.I32, 5)
+      oprot.writeI32(self.sshPort)
+      oprot.writeFieldEnd()
+    if self.monitorMode is not None:
+      oprot.writeFieldBegin('monitorMode', TType.I32, 6)
+      oprot.writeI32(self.monitorMode)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobSubmissionInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field jobSubmissionInterfaceId is unset!')
+    if self.securityProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field securityProtocol is unset!')
+    if self.resourceJobManager is None:
+      raise TProtocol.TProtocolException(message='Required field resourceJobManager is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
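SSHJobSubmission ties together a security protocol, a ResourceJobManager and a monitoring mode; validate() enforces the first two plus the interface id. A sketch (the host name is illustrative):

    from apache.airavata.model.appcatalog.computeresource.ttypes import (
        SSHJobSubmission, ResourceJobManager, ResourceJobManagerType,
        SecurityProtocol, MonitorMode)

    ssh = SSHJobSubmission(
        securityProtocol=SecurityProtocol.SSH_KEYS,
        resourceJobManager=ResourceJobManager(
            resourceJobManagerType=ResourceJobManagerType.PBS),
        alternativeSSHHostName='login.example.org',    # illustrative host
        monitorMode=MonitorMode.POLL_JOB_MANAGER)
    ssh.validate()  # sshPort defaults to 22; ids keep DO_NOT_SET_AT_CLIENTS
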
+class GlobusJobSubmission:
+  """
+  Attributes:
+   - jobSubmissionInterfaceId
+   - securityProtocol
+   - globusGateKeeperEndPoint
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobSubmissionInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'securityProtocol', None, None, ), # 2
+    (3, TType.LIST, 'globusGateKeeperEndPoint', (TType.STRING,None), None, ), # 3
+  )
+
+  def __init__(self, jobSubmissionInterfaceId=thrift_spec[1][4], securityProtocol=None, globusGateKeeperEndPoint=None,):
+    self.jobSubmissionInterfaceId = jobSubmissionInterfaceId
+    self.securityProtocol = securityProtocol
+    self.globusGateKeeperEndPoint = globusGateKeeperEndPoint
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobSubmissionInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.securityProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.LIST:
+          self.globusGateKeeperEndPoint = []
+          (_etype19, _size16) = iprot.readListBegin()
+          for _i20 in xrange(_size16):
+            _elem21 = iprot.readString();
+            self.globusGateKeeperEndPoint.append(_elem21)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('GlobusJobSubmission')
+    if self.jobSubmissionInterfaceId is not None:
+      oprot.writeFieldBegin('jobSubmissionInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.jobSubmissionInterfaceId)
+      oprot.writeFieldEnd()
+    if self.securityProtocol is not None:
+      oprot.writeFieldBegin('securityProtocol', TType.I32, 2)
+      oprot.writeI32(self.securityProtocol)
+      oprot.writeFieldEnd()
+    if self.globusGateKeeperEndPoint is not None:
+      oprot.writeFieldBegin('globusGateKeeperEndPoint', TType.LIST, 3)
+      oprot.writeListBegin(TType.STRING, len(self.globusGateKeeperEndPoint))
+      for iter22 in self.globusGateKeeperEndPoint:
+        oprot.writeString(iter22)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobSubmissionInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field jobSubmissionInterfaceId is unset!')
+    if self.securityProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field securityProtocol is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class UnicoreJobSubmission:
+  """
+  Unicore Job Submission
+
+  unicoreEndPointURL:
+   unicoreGateway End Point. The provider will query this service to fetch required service end points.
+  authenticationMode:
+   The authenticationMode defines the way the certificate is fetched.
+
+  Attributes:
+   - jobSubmissionInterfaceId
+   - securityProtocol
+   - unicoreEndPointURL
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobSubmissionInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'securityProtocol', None, None, ), # 2
+    (3, TType.STRING, 'unicoreEndPointURL', None, None, ), # 3
+  )
+
+  def __init__(self, jobSubmissionInterfaceId=thrift_spec[1][4], securityProtocol=None, unicoreEndPointURL=None,):
+    self.jobSubmissionInterfaceId = jobSubmissionInterfaceId
+    self.securityProtocol = securityProtocol
+    self.unicoreEndPointURL = unicoreEndPointURL
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobSubmissionInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.securityProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.unicoreEndPointURL = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('UnicoreJobSubmission')
+    if self.jobSubmissionInterfaceId is not None:
+      oprot.writeFieldBegin('jobSubmissionInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.jobSubmissionInterfaceId)
+      oprot.writeFieldEnd()
+    if self.securityProtocol is not None:
+      oprot.writeFieldBegin('securityProtocol', TType.I32, 2)
+      oprot.writeI32(self.securityProtocol)
+      oprot.writeFieldEnd()
+    if self.unicoreEndPointURL is not None:
+      oprot.writeFieldBegin('unicoreEndPointURL', TType.STRING, 3)
+      oprot.writeString(self.unicoreEndPointURL)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobSubmissionInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field jobSubmissionInterfaceId is unset!')
+    if self.securityProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field securityProtocol is unset!')
+    if self.unicoreEndPointURL is None:
+      raise TProtocol.TProtocolException(message='Required field unicoreEndPointURL is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class CloudJobSubmission:
+  """
+  Cloud Job Submission
+
+
+
+  Attributes:
+   - jobSubmissionInterfaceId
+   - securityProtocol
+   - nodeId
+   - executableType
+   - providerName
+   - userAccountName
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobSubmissionInterfaceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.I32, 'securityProtocol', None, None, ), # 2
+    (3, TType.STRING, 'nodeId', None, None, ), # 3
+    (4, TType.STRING, 'executableType', None, None, ), # 4
+    (5, TType.I32, 'providerName', None, None, ), # 5
+    (6, TType.STRING, 'userAccountName', None, None, ), # 6
+  )
+
+  def __init__(self, jobSubmissionInterfaceId=thrift_spec[1][4], securityProtocol=None, nodeId=None, executableType=None, providerName=None, userAccountName=None,):
+    self.jobSubmissionInterfaceId = jobSubmissionInterfaceId
+    self.securityProtocol = securityProtocol
+    self.nodeId = nodeId
+    self.executableType = executableType
+    self.providerName = providerName
+    self.userAccountName = userAccountName
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobSubmissionInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.securityProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.nodeId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.executableType = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I32:
+          self.providerName = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.STRING:
+          self.userAccountName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('CloudJobSubmission')
+    if self.jobSubmissionInterfaceId is not None:
+      oprot.writeFieldBegin('jobSubmissionInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.jobSubmissionInterfaceId)
+      oprot.writeFieldEnd()
+    if self.securityProtocol is not None:
+      oprot.writeFieldBegin('securityProtocol', TType.I32, 2)
+      oprot.writeI32(self.securityProtocol)
+      oprot.writeFieldEnd()
+    if self.nodeId is not None:
+      oprot.writeFieldBegin('nodeId', TType.STRING, 3)
+      oprot.writeString(self.nodeId)
+      oprot.writeFieldEnd()
+    if self.executableType is not None:
+      oprot.writeFieldBegin('executableType', TType.STRING, 4)
+      oprot.writeString(self.executableType)
+      oprot.writeFieldEnd()
+    if self.providerName is not None:
+      oprot.writeFieldBegin('providerName', TType.I32, 5)
+      oprot.writeI32(self.providerName)
+      oprot.writeFieldEnd()
+    if self.userAccountName is not None:
+      oprot.writeFieldBegin('userAccountName', TType.STRING, 6)
+      oprot.writeString(self.userAccountName)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobSubmissionInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field jobSubmissionInterfaceId is unset!')
+    if self.securityProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field securityProtocol is unset!')
+    if self.nodeId is None:
+      raise TProtocol.TProtocolException(message='Required field nodeId is unset!')
+    if self.executableType is None:
+      raise TProtocol.TProtocolException(message='Required field executableType is unset!')
+    if self.providerName is None:
+      raise TProtocol.TProtocolException(message='Required field providerName is unset!')
+    if self.userAccountName is None:
+      raise TProtocol.TProtocolException(message='Required field userAccountName is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class JobSubmissionInterface:
+  """
+  Job Submission Interfaces
+
+  jobSubmissionInterfaceId: The Job Submission Interface has to be previously registered and referenced here.
+
+  priorityOrder:
+   For resources with multiple interfaces, a priority order should be specified.
+    The lower the number, the higher the priority.
+
+
+  Attributes:
+   - jobSubmissionInterfaceId
+   - jobSubmissionProtocol
+   - priorityOrder
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'jobSubmissionInterfaceId', None, None, ), # 1
+    (2, TType.I32, 'jobSubmissionProtocol', None, None, ), # 2
+    (3, TType.I32, 'priorityOrder', None, 0, ), # 3
+  )
+
+  def __init__(self, jobSubmissionInterfaceId=None, jobSubmissionProtocol=None, priorityOrder=thrift_spec[3][4],):
+    self.jobSubmissionInterfaceId = jobSubmissionInterfaceId
+    self.jobSubmissionProtocol = jobSubmissionProtocol
+    self.priorityOrder = priorityOrder
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.jobSubmissionInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.jobSubmissionProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.priorityOrder = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('JobSubmissionInterface')
+    if self.jobSubmissionInterfaceId is not None:
+      oprot.writeFieldBegin('jobSubmissionInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.jobSubmissionInterfaceId)
+      oprot.writeFieldEnd()
+    if self.jobSubmissionProtocol is not None:
+      oprot.writeFieldBegin('jobSubmissionProtocol', TType.I32, 2)
+      oprot.writeI32(self.jobSubmissionProtocol)
+      oprot.writeFieldEnd()
+    if self.priorityOrder is not None:
+      oprot.writeFieldBegin('priorityOrder', TType.I32, 3)
+      oprot.writeI32(self.priorityOrder)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.jobSubmissionInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field jobSubmissionInterfaceId is unset!')
+    if self.jobSubmissionProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field jobSubmissionProtocol is unset!')
+    if self.priorityOrder is None:
+      raise TProtocol.TProtocolException(message='Required field priorityOrder is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class DataMovementInterface:
+  """
+  Data Movement Interfaces
+
+  dataMovementInterfaceId: The Data Movement Interface has to be previously registered and referenced here.
+
+  priorityOrder:
+   For resources with multiple interfaces, a priority order should be specified.
+    The lower the number, the higher the priority.
+
+
+  Attributes:
+   - dataMovementInterfaceId
+   - dataMovementProtocol
+   - priorityOrder
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'dataMovementInterfaceId', None, None, ), # 1
+    (2, TType.I32, 'dataMovementProtocol', None, None, ), # 2
+    (3, TType.I32, 'priorityOrder', None, 0, ), # 3
+  )
+
+  def __init__(self, dataMovementInterfaceId=None, dataMovementProtocol=None, priorityOrder=thrift_spec[3][4],):
+    self.dataMovementInterfaceId = dataMovementInterfaceId
+    self.dataMovementProtocol = dataMovementProtocol
+    self.priorityOrder = priorityOrder
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.dataMovementInterfaceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.I32:
+          self.dataMovementProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.I32:
+          self.priorityOrder = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('DataMovementInterface')
+    if self.dataMovementInterfaceId is not None:
+      oprot.writeFieldBegin('dataMovementInterfaceId', TType.STRING, 1)
+      oprot.writeString(self.dataMovementInterfaceId)
+      oprot.writeFieldEnd()
+    if self.dataMovementProtocol is not None:
+      oprot.writeFieldBegin('dataMovementProtocol', TType.I32, 2)
+      oprot.writeI32(self.dataMovementProtocol)
+      oprot.writeFieldEnd()
+    if self.priorityOrder is not None:
+      oprot.writeFieldBegin('priorityOrder', TType.I32, 3)
+      oprot.writeI32(self.priorityOrder)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.dataMovementInterfaceId is None:
+      raise TProtocol.TProtocolException(message='Required field dataMovementInterfaceId is unset!')
+    if self.dataMovementProtocol is None:
+      raise TProtocol.TProtocolException(message='Required field dataMovementProtocol is unset!')
+    if self.priorityOrder is None:
+      raise TProtocol.TProtocolException(message='Required field priorityOrder is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class ComputeResourceDescription:
+  """
+  Computational Resource Description
+
+  computeResourceId: Airavata Internal Unique Identifier to distinguish Compute Resource.
+
+  hostName:
+    Fully Qualified Host Name.
+
+  hostAliases:
+    Aliases if any.
+
+  ipAddress:
+    IP Addresses of the Resource.
+
+  resourceDescription:
+   A user-friendly description of the resource.
+
+  JobSubmissionProtocols:
+   A computational resource may have one or more ways of submitting jobs. This structure
+     will hold all available mechanisms to interact with the resource.
+   The key is the priority.
+
+  DataMovementProtocol:
+   Option to specify a preferred data movement mechanism from the available options.
+
+  fileSystems:
+   Map of file systems type and the path.
+
+
+  Attributes:
+   - computeResourceId
+   - hostName
+   - hostAliases
+   - ipAddresses
+   - resourceDescription
+   - batchQueues
+   - fileSystems
+   - jobSubmissionInterfaces
+   - dataMovementInterfaces
+   - maxMemoryPerNode
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'computeResourceId', None, "DO_NOT_SET_AT_CLIENTS", ), # 1
+    (2, TType.STRING, 'hostName', None, None, ), # 2
+    (3, TType.LIST, 'hostAliases', (TType.STRING,None), None, ), # 3
+    (4, TType.LIST, 'ipAddresses', (TType.STRING,None), None, ), # 4
+    (5, TType.STRING, 'resourceDescription', None, None, ), # 5
+    (6, TType.LIST, 'batchQueues', (TType.STRUCT,(BatchQueue, BatchQueue.thrift_spec)), None, ), # 6
+    (7, TType.MAP, 'fileSystems', (TType.I32,None,TType.STRING,None), None, ), # 7
+    (8, TType.LIST, 'jobSubmissionInterfaces', (TType.STRUCT,(JobSubmissionInterface, JobSubmissionInterface.thrift_spec)), None, ), # 8
+    (9, TType.LIST, 'dataMovementInterfaces', (TType.STRUCT,(DataMovementInterface, DataMovementInterface.thrift_spec)), None, ), # 9
+    (10, TType.I32, 'maxMemoryPerNode', None, None, ), # 10
+  )
+
+  def __init__(self, computeResourceId=thrift_spec[1][4], hostName=None, hostAliases=None, ipAddresses=None, resourceDescription=None, batchQueues=None, fileSystems=None, jobSubmissionInterfaces=None, dataMovementInterfaces=None, maxMemoryPerNode=None,):
+    self.computeResourceId = computeResourceId
+    self.hostName = hostName
+    self.hostAliases = hostAliases
+    self.ipAddresses = ipAddresses
+    self.resourceDescription = resourceDescription
+    self.batchQueues = batchQueues
+    self.fileSystems = fileSystems
+    self.jobSubmissionInterfaces = jobSubmissionInterfaces
+    self.dataMovementInterfaces = dataMovementInterfaces
+    self.maxMemoryPerNode = maxMemoryPerNode
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.computeResourceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.hostName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.LIST:
+          self.hostAliases = []
+          (_etype26, _size23) = iprot.readListBegin()
+          for _i27 in xrange(_size23):
+            _elem28 = iprot.readString();
+            self.hostAliases.append(_elem28)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.LIST:
+          self.ipAddresses = []
+          (_etype32, _size29) = iprot.readListBegin()
+          for _i33 in xrange(_size29):
+            _elem34 = iprot.readString();
+            self.ipAddresses.append(_elem34)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.STRING:
+          self.resourceDescription = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.LIST:
+          self.batchQueues = []
+          (_etype38, _size35) = iprot.readListBegin()
+          for _i39 in xrange(_size35):
+            _elem40 = BatchQueue()
+            _elem40.read(iprot)
+            self.batchQueues.append(_elem40)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.MAP:
+          self.fileSystems = {}
+          (_ktype42, _vtype43, _size41 ) = iprot.readMapBegin()
+          for _i45 in xrange(_size41):
+            _key46 = iprot.readI32();
+            _val47 = iprot.readString();
+            self.fileSystems[_key46] = _val47
+          iprot.readMapEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.LIST:
+          self.jobSubmissionInterfaces = []
+          (_etype51, _size48) = iprot.readListBegin()
+          for _i52 in xrange(_size48):
+            _elem53 = JobSubmissionInterface()
+            _elem53.read(iprot)
+            self.jobSubmissionInterfaces.append(_elem53)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 9:
+        if ftype == TType.LIST:
+          self.dataMovementInterfaces = []
+          (_etype57, _size54) = iprot.readListBegin()
+          for _i58 in xrange(_size54):
+            _elem59 = DataMovementInterface()
+            _elem59.read(iprot)
+            self.dataMovementInterfaces.append(_elem59)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 10:
+        if ftype == TType.I32:
+          self.maxMemoryPerNode = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ComputeResourceDescription')
+    if self.computeResourceId is not None:
+      oprot.writeFieldBegin('computeResourceId', TType.STRING, 1)
+      oprot.writeString(self.computeResourceId)
+      oprot.writeFieldEnd()
+    if self.hostName is not None:
+      oprot.writeFieldBegin('hostName', TType.STRING, 2)
+      oprot.writeString(self.hostName)
+      oprot.writeFieldEnd()
+    if self.hostAliases is not None:
+      oprot.writeFieldBegin('hostAliases', TType.LIST, 3)
+      oprot.writeListBegin(TType.STRING, len(self.hostAliases))
+      for iter60 in self.hostAliases:
+        oprot.writeString(iter60)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.ipAddresses is not None:
+      oprot.writeFieldBegin('ipAddresses', TType.LIST, 4)
+      oprot.writeListBegin(TType.STRING, len(self.ipAddresses))
+      for iter61 in self.ipAddresses:
+        oprot.writeString(iter61)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.resourceDescription is not None:
+      oprot.writeFieldBegin('resourceDescription', TType.STRING, 5)
+      oprot.writeString(self.resourceDescription)
+      oprot.writeFieldEnd()
+    if self.batchQueues is not None:
+      oprot.writeFieldBegin('batchQueues', TType.LIST, 6)
+      oprot.writeListBegin(TType.STRUCT, len(self.batchQueues))
+      for iter62 in self.batchQueues:
+        iter62.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.fileSystems is not None:
+      oprot.writeFieldBegin('fileSystems', TType.MAP, 7)
+      oprot.writeMapBegin(TType.I32, TType.STRING, len(self.fileSystems))
+      for kiter63,viter64 in self.fileSystems.items():
+        oprot.writeI32(kiter63)
+        oprot.writeString(viter64)
+      oprot.writeMapEnd()
+      oprot.writeFieldEnd()
+    if self.jobSubmissionInterfaces is not None:
+      oprot.writeFieldBegin('jobSubmissionInterfaces', TType.LIST, 8)
+      oprot.writeListBegin(TType.STRUCT, len(self.jobSubmissionInterfaces))
+      for iter65 in self.jobSubmissionInterfaces:
+        iter65.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.dataMovementInterfaces is not None:
+      oprot.writeFieldBegin('dataMovementInterfaces', TType.LIST, 9)
+      oprot.writeListBegin(TType.STRUCT, len(self.dataMovementInterfaces))
+      for iter66 in self.dataMovementInterfaces:
+        iter66.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.maxMemoryPerNode is not None:
+      oprot.writeFieldBegin('maxMemoryPerNode', TType.I32, 10)
+      oprot.writeI32(self.maxMemoryPerNode)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.computeResourceId is None:
+      raise TProtocol.TProtocolException(message='Required field computeResourceId is unset!')
+    if self.hostName is None:
+      raise TProtocol.TProtocolException(message='Required field hostName is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
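
For readers new to the generated stubs, the snippet below is a minimal, hypothetical client-side sketch (not part of the generated file) showing how the compute-resource structures above fit together. The package path mirrors the SDK layout added in this commit; the host name, interface ids, and the integer protocol values are placeholders standing in for the real enum constants defined elsewhere in the model.

    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    from apache.airavata.model.appcatalog.computeresource.ttypes import (
        ComputeResourceDescription, JobSubmissionInterface, DataMovementInterface)

    # computeResourceId is left at its default sentinel ("DO_NOT_SET_AT_CLIENTS");
    # the server assigns the real identifier on registration.
    resource = ComputeResourceDescription(
        hostName="hpc.example.edu",                       # placeholder host
        jobSubmissionInterfaces=[JobSubmissionInterface(
            jobSubmissionInterfaceId="ssh-submission-1",  # placeholder id
            jobSubmissionProtocol=0,                      # enum value, e.g. SSH
            priorityOrder=0)],                            # lower number = higher priority
        dataMovementInterfaces=[DataMovementInterface(
            dataMovementInterfaceId="scp-movement-1",     # placeholder id
            dataMovementProtocol=0,                       # enum value, e.g. SCP
            priorityOrder=0)])
    resource.validate()  # raises TProtocolException if a required field is unset

    # Round trip through the binary protocol, the same path read()/write() above expect.
    buf = TTransport.TMemoryBuffer()
    resource.write(TBinaryProtocol.TBinaryProtocol(buf))
    copy = ComputeResourceDescription()
    copy.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
    assert copy == resource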

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/constants.py
new file mode 100644
index 0000000..35216c6
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/constants.py
@@ -0,0 +1,11 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/ttypes.py
new file mode 100644
index 0000000..56c3b0c
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/appcatalog/gatewayprofile/ttypes.py
@@ -0,0 +1,290 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.model.appcatalog.computeresource.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+
+class ComputeResourcePreference:
+  """
+  Gateway-specific preferences for a Compute Resource
+
+  computeResourceId:
+    Correlate the preference to a compute resource.
+
+  overridebyAiravata:
+    If set to true, Airavata will override these preferences if better alternatives exist.
+
+  loginUserName:
+    The login user name to be used on this compute resource.
+
+  preferredJobSubmissionProtocol:
+    For resources with multiple job submission protocols, the gateway can pick a preferred option.
+
+  preferredDataMovementProtocol:
+    For resources with multiple data movement protocols, the gateway can pick a preferred option.
+
+  preferredBatchQueue:
+   Gateways can choose a default batch queue based on average job dimensions, reservations, or other metrics.
+
+  scratchLocation:
+   Path to the local scratch space on an HPC cluster. Typically used to create the working directory for job execution.
+
+  allocationProjectNumber:
+   Typically used on HPC machines to charge computing usage to an account number. For instance, on XSEDE once an
+     allocation is approved, an allocation number is assigned. Before passing this number with job submissions, the
+     account to be used has to be added to the allocation.
+
+
+  Attributes:
+   - computeResourceId
+   - overridebyAiravata
+   - loginUserName
+   - preferredJobSubmissionProtocol
+   - preferredDataMovementProtocol
+   - preferredBatchQueue
+   - scratchLocation
+   - allocationProjectNumber
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'computeResourceId', None, None, ), # 1
+    (2, TType.BOOL, 'overridebyAiravata', None, True, ), # 2
+    (3, TType.STRING, 'loginUserName', None, None, ), # 3
+    (4, TType.I32, 'preferredJobSubmissionProtocol', None, None, ), # 4
+    (5, TType.I32, 'preferredDataMovementProtocol', None, None, ), # 5
+    (6, TType.STRING, 'preferredBatchQueue', None, None, ), # 6
+    (7, TType.STRING, 'scratchLocation', None, None, ), # 7
+    (8, TType.STRING, 'allocationProjectNumber', None, None, ), # 8
+  )
+
+  def __init__(self, computeResourceId=None, overridebyAiravata=thrift_spec[2][4], loginUserName=None, preferredJobSubmissionProtocol=None, preferredDataMovementProtocol=None, preferredBatchQueue=None, scratchLocation=None, allocationProjectNumber=None,):
+    self.computeResourceId = computeResourceId
+    self.overridebyAiravata = overridebyAiravata
+    self.loginUserName = loginUserName
+    self.preferredJobSubmissionProtocol = preferredJobSubmissionProtocol
+    self.preferredDataMovementProtocol = preferredDataMovementProtocol
+    self.preferredBatchQueue = preferredBatchQueue
+    self.scratchLocation = scratchLocation
+    self.allocationProjectNumber = allocationProjectNumber
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.computeResourceId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.BOOL:
+          self.overridebyAiravata = iprot.readBool();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.loginUserName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.I32:
+          self.preferredJobSubmissionProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I32:
+          self.preferredDataMovementProtocol = iprot.readI32();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.STRING:
+          self.preferredBatchQueue = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.STRING:
+          self.scratchLocation = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 8:
+        if ftype == TType.STRING:
+          self.allocationProjectNumber = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('ComputeResourcePreference')
+    if self.computeResourceId is not None:
+      oprot.writeFieldBegin('computeResourceId', TType.STRING, 1)
+      oprot.writeString(self.computeResourceId)
+      oprot.writeFieldEnd()
+    if self.overridebyAiravata is not None:
+      oprot.writeFieldBegin('overridebyAiravata', TType.BOOL, 2)
+      oprot.writeBool(self.overridebyAiravata)
+      oprot.writeFieldEnd()
+    if self.loginUserName is not None:
+      oprot.writeFieldBegin('loginUserName', TType.STRING, 3)
+      oprot.writeString(self.loginUserName)
+      oprot.writeFieldEnd()
+    if self.preferredJobSubmissionProtocol is not None:
+      oprot.writeFieldBegin('preferredJobSubmissionProtocol', TType.I32, 4)
+      oprot.writeI32(self.preferredJobSubmissionProtocol)
+      oprot.writeFieldEnd()
+    if self.preferredDataMovementProtocol is not None:
+      oprot.writeFieldBegin('preferredDataMovementProtocol', TType.I32, 5)
+      oprot.writeI32(self.preferredDataMovementProtocol)
+      oprot.writeFieldEnd()
+    if self.preferredBatchQueue is not None:
+      oprot.writeFieldBegin('preferredBatchQueue', TType.STRING, 6)
+      oprot.writeString(self.preferredBatchQueue)
+      oprot.writeFieldEnd()
+    if self.scratchLocation is not None:
+      oprot.writeFieldBegin('scratchLocation', TType.STRING, 7)
+      oprot.writeString(self.scratchLocation)
+      oprot.writeFieldEnd()
+    if self.allocationProjectNumber is not None:
+      oprot.writeFieldBegin('allocationProjectNumber', TType.STRING, 8)
+      oprot.writeString(self.allocationProjectNumber)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.computeResourceId is None:
+      raise TProtocol.TProtocolException(message='Required field computeResourceId is unset!')
+    if self.overridebyAiravata is None:
+      raise TProtocol.TProtocolException(message='Required field overridebyAiravata is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class GatewayResourceProfile:
+  """
+  Gateway Resource Profile
+
+  gatewayID:
+    Unique identifier for the gateway assigned by Airavata. Correlate this to Airavata Admin API Gateway Registration.
+
+  computeResourcePreferences:
+   List of resource preferences for each of the registered compute resources.
+
+
+
+  Attributes:
+   - gatewayID
+   - computeResourcePreferences
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'gatewayID', None, None, ), # 1
+    (2, TType.LIST, 'computeResourcePreferences', (TType.STRUCT,(ComputeResourcePreference, ComputeResourcePreference.thrift_spec)), None, ), # 2
+  )
+
+  def __init__(self, gatewayID=None, computeResourcePreferences=None,):
+    self.gatewayID = gatewayID
+    self.computeResourcePreferences = computeResourcePreferences
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.gatewayID = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.LIST:
+          self.computeResourcePreferences = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = ComputeResourcePreference()
+            _elem5.read(iprot)
+            self.computeResourcePreferences.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('GatewayResourceProfile')
+    if self.gatewayID is not None:
+      oprot.writeFieldBegin('gatewayID', TType.STRING, 1)
+      oprot.writeString(self.gatewayID)
+      oprot.writeFieldEnd()
+    if self.computeResourcePreferences is not None:
+      oprot.writeFieldBegin('computeResourcePreferences', TType.LIST, 2)
+      oprot.writeListBegin(TType.STRUCT, len(self.computeResourcePreferences))
+      for iter6 in self.computeResourcePreferences:
+        iter6.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.gatewayID is None:
+      raise TProtocol.TProtocolException(message='Required field gatewayID is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
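
As a rough illustration (again hypothetical, not generated code), a gateway would typically build one ComputeResourcePreference per registered compute resource and collect them in a GatewayResourceProfile. The ids, user name, queue name, scratch path, and allocation number below are placeholders.

    from apache.airavata.model.appcatalog.gatewayprofile.ttypes import (
        ComputeResourcePreference, GatewayResourceProfile)

    preference = ComputeResourcePreference(
        computeResourceId="compute-resource-id",   # id returned when the resource was registered
        overridebyAiravata=False,                  # keep the gateway's own choices
        loginUserName="gateway_user",              # placeholder account on the resource
        preferredJobSubmissionProtocol=0,          # enum value, e.g. SSH
        preferredBatchQueue="normal",              # placeholder queue name
        scratchLocation="/scratch/gateway_user",   # placeholder scratch path
        allocationProjectNumber="ALLOCATION-123")  # placeholder allocation number
    preference.validate()

    profile = GatewayResourceProfile(
        gatewayID="example-gateway",
        computeResourcePreferences=[preference])
    profile.validate()  # only gatewayID is required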

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/constants.py
new file mode 100644
index 0000000..35216c6
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/constants.py
@@ -0,0 +1,11 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/__init__.py
new file mode 100644
index 0000000..adefd8e
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/__init__.py
@@ -0,0 +1 @@
+__all__ = ['ttypes', 'constants']

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/constants.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/constants.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/constants.py
new file mode 100644
index 0000000..249ecf4
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/messaging/event/constants.py
@@ -0,0 +1,12 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+
+DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
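
A small, hypothetical aside on the DEFAULT_ID sentinel above: the generated model structs use the same "DO_NOT_SET_AT_CLIENTS" string as the default for server-assigned identifiers, so a client can tell whether an id has been assigned yet. The host name below is a placeholder.

    from apache.airavata.model.messaging.event.constants import DEFAULT_ID
    from apache.airavata.model.appcatalog.computeresource.ttypes import ComputeResourceDescription

    resource = ComputeResourceDescription(hostName="hpc.example.edu")  # placeholder host
    # An unregistered object can be detected before it is sent to the server.
    assert resource.computeResourceId == DEFAULT_ID == "DO_NOT_SET_AT_CLIENTS"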


[2/8] airavata git commit: Adding python generated code - AIRAVATA-1642

Posted by sm...@apache.org.
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/ttypes.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/ttypes.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/ttypes.py
new file mode 100644
index 0000000..25d2fb0
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/model/workspace/ttypes.py
@@ -0,0 +1,429 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+import apache.airavata.model.workspace.experiment.ttypes
+
+
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+
+class Group:
+  """
+  Attributes:
+   - groupName
+   - description
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'groupName', None, None, ), # 1
+    (2, TType.STRING, 'description', None, None, ), # 2
+  )
+
+  def __init__(self, groupName=None, description=None,):
+    self.groupName = groupName
+    self.description = description
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.groupName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.description = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('Group')
+    if self.groupName is not None:
+      oprot.writeFieldBegin('groupName', TType.STRING, 1)
+      oprot.writeString(self.groupName)
+      oprot.writeFieldEnd()
+    if self.description is not None:
+      oprot.writeFieldBegin('description', TType.STRING, 2)
+      oprot.writeString(self.description)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.groupName is None:
+      raise TProtocol.TProtocolException(message='Required field groupName is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class Project:
+  """
+  Attributes:
+   - projectID
+   - owner
+   - name
+   - description
+   - creationTime
+   - sharedUsers
+   - sharedGroups
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'projectID', None, "DEFAULT", ), # 1
+    (2, TType.STRING, 'owner', None, None, ), # 2
+    (3, TType.STRING, 'name', None, None, ), # 3
+    (4, TType.STRING, 'description', None, None, ), # 4
+    (5, TType.I64, 'creationTime', None, None, ), # 5
+    (6, TType.LIST, 'sharedUsers', (TType.STRING,None), None, ), # 6
+    (7, TType.LIST, 'sharedGroups', (TType.STRING,None), None, ), # 7
+  )
+
+  def __init__(self, projectID=thrift_spec[1][4], owner=None, name=None, description=None, creationTime=None, sharedUsers=None, sharedGroups=None,):
+    self.projectID = projectID
+    self.owner = owner
+    self.name = name
+    self.description = description
+    self.creationTime = creationTime
+    self.sharedUsers = sharedUsers
+    self.sharedGroups = sharedGroups
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.projectID = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.owner = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.name = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.description = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 5:
+        if ftype == TType.I64:
+          self.creationTime = iprot.readI64();
+        else:
+          iprot.skip(ftype)
+      elif fid == 6:
+        if ftype == TType.LIST:
+          self.sharedUsers = []
+          (_etype3, _size0) = iprot.readListBegin()
+          for _i4 in xrange(_size0):
+            _elem5 = iprot.readString();
+            self.sharedUsers.append(_elem5)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      elif fid == 7:
+        if ftype == TType.LIST:
+          self.sharedGroups = []
+          (_etype9, _size6) = iprot.readListBegin()
+          for _i10 in xrange(_size6):
+            _elem11 = iprot.readString();
+            self.sharedGroups.append(_elem11)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('Project')
+    if self.projectID is not None:
+      oprot.writeFieldBegin('projectID', TType.STRING, 1)
+      oprot.writeString(self.projectID)
+      oprot.writeFieldEnd()
+    if self.owner is not None:
+      oprot.writeFieldBegin('owner', TType.STRING, 2)
+      oprot.writeString(self.owner)
+      oprot.writeFieldEnd()
+    if self.name is not None:
+      oprot.writeFieldBegin('name', TType.STRING, 3)
+      oprot.writeString(self.name)
+      oprot.writeFieldEnd()
+    if self.description is not None:
+      oprot.writeFieldBegin('description', TType.STRING, 4)
+      oprot.writeString(self.description)
+      oprot.writeFieldEnd()
+    if self.creationTime is not None:
+      oprot.writeFieldBegin('creationTime', TType.I64, 5)
+      oprot.writeI64(self.creationTime)
+      oprot.writeFieldEnd()
+    if self.sharedUsers is not None:
+      oprot.writeFieldBegin('sharedUsers', TType.LIST, 6)
+      oprot.writeListBegin(TType.STRING, len(self.sharedUsers))
+      for iter12 in self.sharedUsers:
+        oprot.writeString(iter12)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    if self.sharedGroups is not None:
+      oprot.writeFieldBegin('sharedGroups', TType.LIST, 7)
+      oprot.writeListBegin(TType.STRING, len(self.sharedGroups))
+      for iter13 in self.sharedGroups:
+        oprot.writeString(iter13)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.projectID is None:
+      raise TProtocol.TProtocolException(message='Required field projectID is unset!')
+    if self.owner is None:
+      raise TProtocol.TProtocolException(message='Required field owner is unset!')
+    if self.name is None:
+      raise TProtocol.TProtocolException(message='Required field name is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class User:
+  """
+  Attributes:
+   - userName
+   - groupList
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'userName', None, None, ), # 1
+    (2, TType.LIST, 'groupList', (TType.STRUCT,(Group, Group.thrift_spec)), None, ), # 2
+  )
+
+  def __init__(self, userName=None, groupList=None,):
+    self.userName = userName
+    self.groupList = groupList
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.userName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.LIST:
+          self.groupList = []
+          (_etype17, _size14) = iprot.readListBegin()
+          for _i18 in xrange(_size14):
+            _elem19 = Group()
+            _elem19.read(iprot)
+            self.groupList.append(_elem19)
+          iprot.readListEnd()
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('User')
+    if self.userName is not None:
+      oprot.writeFieldBegin('userName', TType.STRING, 1)
+      oprot.writeString(self.userName)
+      oprot.writeFieldEnd()
+    if self.groupList is not None:
+      oprot.writeFieldBegin('groupList', TType.LIST, 2)
+      oprot.writeListBegin(TType.STRUCT, len(self.groupList))
+      for iter20 in self.groupList:
+        iter20.write(oprot)
+      oprot.writeListEnd()
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.userName is None:
+      raise TProtocol.TProtocolException(message='Required field userName is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)
+
+class Gateway:
+  """
+  Attributes:
+   - gatewayId
+   - gatewayName
+   - domain
+   - emailAddress
+  """
+
+  thrift_spec = (
+    None, # 0
+    (1, TType.STRING, 'gatewayId', None, None, ), # 1
+    (2, TType.STRING, 'gatewayName', None, None, ), # 2
+    (3, TType.STRING, 'domain', None, None, ), # 3
+    (4, TType.STRING, 'emailAddress', None, None, ), # 4
+  )
+
+  def __init__(self, gatewayId=None, gatewayName=None, domain=None, emailAddress=None,):
+    self.gatewayId = gatewayId
+    self.gatewayName = gatewayName
+    self.domain = domain
+    self.emailAddress = emailAddress
+
+  def read(self, iprot):
+    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
+      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
+      return
+    iprot.readStructBegin()
+    while True:
+      (fname, ftype, fid) = iprot.readFieldBegin()
+      if ftype == TType.STOP:
+        break
+      if fid == 1:
+        if ftype == TType.STRING:
+          self.gatewayId = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 2:
+        if ftype == TType.STRING:
+          self.gatewayName = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 3:
+        if ftype == TType.STRING:
+          self.domain = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      elif fid == 4:
+        if ftype == TType.STRING:
+          self.emailAddress = iprot.readString();
+        else:
+          iprot.skip(ftype)
+      else:
+        iprot.skip(ftype)
+      iprot.readFieldEnd()
+    iprot.readStructEnd()
+
+  def write(self, oprot):
+    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
+      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
+      return
+    oprot.writeStructBegin('Gateway')
+    if self.gatewayId is not None:
+      oprot.writeFieldBegin('gatewayId', TType.STRING, 1)
+      oprot.writeString(self.gatewayId)
+      oprot.writeFieldEnd()
+    if self.gatewayName is not None:
+      oprot.writeFieldBegin('gatewayName', TType.STRING, 2)
+      oprot.writeString(self.gatewayName)
+      oprot.writeFieldEnd()
+    if self.domain is not None:
+      oprot.writeFieldBegin('domain', TType.STRING, 3)
+      oprot.writeString(self.domain)
+      oprot.writeFieldEnd()
+    if self.emailAddress is not None:
+      oprot.writeFieldBegin('emailAddress', TType.STRING, 4)
+      oprot.writeString(self.emailAddress)
+      oprot.writeFieldEnd()
+    oprot.writeFieldStop()
+    oprot.writeStructEnd()
+
+  def validate(self):
+    if self.gatewayId is None:
+      raise TProtocol.TProtocolException(message='Required field gatewayId is unset!')
+    return
+
+
+  def __repr__(self):
+    L = ['%s=%r' % (key, value)
+      for key, value in self.__dict__.iteritems()]
+    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
+
+  def __eq__(self, other):
+    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
+
+  def __ne__(self, other):
+    return not (self == other)

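The Project, User and Gateway beans above follow the standard Thrift Python pattern: a thrift_spec table, read/write methods driven by the binary protocol, and a validate() that enforces required fields. As a rough illustration of how they are meant to be used (not part of the commit itself), here is a minimal sketch; the apache.airavata.model.workspace.ttypes module path is assumed from the "namespace py" declarations added further down, and the Thrift 0.9.1 Python runtime is assumed to be on the path.

    # Illustrative sketch only -- not part of this commit.
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    from apache.airavata.model.workspace.ttypes import Gateway   # assumed module path

    gateway = Gateway(gatewayId='test-gateway', gatewayName='Test Gateway',
                      domain='example.org', emailAddress='admin@example.org')
    gateway.validate()                      # raises TProtocolException if gatewayId is unset

    buf = TTransport.TMemoryBuffer()        # serialize with the binary protocol
    gateway.write(TBinaryProtocol.TBinaryProtocol(buf))

    restored = Gateway()                    # deserialize the same bytes back
    restored.read(TBinaryProtocol.TBinaryProtocol(TTransport.TMemoryBuffer(buf.getvalue())))
    assert restored == gateway              # generated __eq__ compares field by field
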
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/pom.xml
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/pom.xml b/airavata-api/airavata-client-sdks/pom.xml
index 75ad93f..cd02c27 100644
--- a/airavata-api/airavata-client-sdks/pom.xml
+++ b/airavata-api/airavata-client-sdks/pom.xml
@@ -31,9 +31,10 @@
                 <activeByDefault>true</activeByDefault>
             </activation>
             <modules>
-		<module>airavata-php-sdk</module>
-		<module>airavata-cpp-sdk</module>
                 <module>java-client-samples</module>
+                <module>airavata-php-sdk</module>
+                <module>airavata-cpp-sdk</module>
+                <module>airavata-python-sdk</module>
             </modules>
         </profile>
     </profiles>

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/airavataDataModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/airavataDataModel.thrift b/airavata-api/thrift-interface-descriptions/airavataDataModel.thrift
index dd5ed51..0149f23 100644
--- a/airavata-api/thrift-interface-descriptions/airavataDataModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/airavataDataModel.thrift
@@ -25,6 +25,7 @@ include "messagingEvents.thrift"
 namespace java org.apache.airavata.model
 namespace php Airavata.Model
 namespace cpp apache.airavata.model
+namespace py apache.airavata.model
 
 /*
  * This file describes the definitions of the Airavata Execution Data Structures. Each of the

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/airavataErrors.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/airavataErrors.thrift b/airavata-api/thrift-interface-descriptions/airavataErrors.thrift
index 0a91adf..6ffd5d5 100644
--- a/airavata-api/thrift-interface-descriptions/airavataErrors.thrift
+++ b/airavata-api/thrift-interface-descriptions/airavataErrors.thrift
@@ -30,7 +30,7 @@ namespace java org.apache.airavata.model.error
 namespace php Airavata.API.Error
 namespace cpp apache.airavata.api.error
 namespace perl AiravataAPIError
-namespace py airavata.api.error
+namespace py apache.airavata.api.error
 namespace js AiravataAPIError
 
 /**

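The namespace change above moves the generated Python error types from airavata.api.error to apache.airavata.api.error, so any hand-written client code that catches Airavata exceptions needs its imports updated. A hedged sketch, assuming the usual one-ttypes-module-per-IDL-file layout the Thrift compiler produces; the class names are the ones referenced in the API docstrings:

    # Illustrative sketch only -- not part of this commit.
    from apache.airavata.api.error.ttypes import (
        InvalidRequestException,
        AiravataClientException,
        AiravataSystemException,
        ExperimentNotFoundException,
    )

    # Hypothetical helper: server-side faults are the ones usually worth retrying.
    RETRYABLE_ERRORS = (AiravataSystemException,)

    def is_retryable(error):
        return isinstance(error, RETRYABLE_ERRORS)
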
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/applicationDeploymentModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/applicationDeploymentModel.thrift b/airavata-api/thrift-interface-descriptions/applicationDeploymentModel.thrift
index d2171b5..a965e23 100644
--- a/airavata-api/thrift-interface-descriptions/applicationDeploymentModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/applicationDeploymentModel.thrift
@@ -27,6 +27,7 @@
 namespace java org.apache.airavata.model.appcatalog.appdeployment
 namespace php Airavata.Model.AppCatalog.AppDeployment
 namespace cpp apache.airavata.model.appcatalog.appdeployment
+namespace py apache.airavata.model.appcatalog.appdeployment
 
 const string DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift b/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
index baeab2a..4c57009 100644
--- a/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/applicationInterfaceModel.thrift
@@ -27,6 +27,7 @@
 namespace java org.apache.airavata.model.appcatalog.appinterface
 namespace php Airavata.Model.AppCatalog.AppInterface
 namespace cpp apache.airavata.model.appcatalog.appinterface
+namespace py apache.airavata.model.appcatalog.appinterface
 
 const string DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift b/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
index d91210c..929e677 100644
--- a/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/computeResourceModel.thrift
@@ -21,6 +21,7 @@
 namespace java org.apache.airavata.model.appcatalog.computeresource
 namespace php Airavata.Model.AppCatalog.ComputeResource
 namespace cpp apache.airavata.model.appcatalog.computeresource
+namespace py apache.airavata.model.appcatalog.computeresource
 
 const string DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/experimentModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/experimentModel.thrift b/airavata-api/thrift-interface-descriptions/experimentModel.thrift
index 7f76f90..2cd82d3 100644
--- a/airavata-api/thrift-interface-descriptions/experimentModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/experimentModel.thrift
@@ -24,6 +24,7 @@ include "applicationInterfaceModel.thrift"
 namespace java org.apache.airavata.model.workspace.experiment
 namespace php Airavata.Model.Workspace.Experiment
 namespace cpp apache.airavata.model.workspace.experiment
+namespace py apache.airavata.model.workspace.experiment
 
 /*
  * This file describes the definitions of the Airavata Experiment Data Structures. Each of the

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift b/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
index 3f384c6..11a6586 100644
--- a/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/gatewayResourceProfileModel.thrift
@@ -21,6 +21,8 @@
 namespace java org.apache.airavata.model.appcatalog.gatewayprofile
 namespace php Airavata.Model.AppCatalog.GatewayProfile
 namespace cpp apache.airavata.model.appcatalog.gatewayprofile
+namespace py apache.airavata.model.appcatalog.gatewayprofile
+
 include "computeResourceModel.thrift"
 
 /**

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/messagingEvents.thrift b/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
index d9e85d4..c2808a1 100644
--- a/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
+++ b/airavata-api/thrift-interface-descriptions/messagingEvents.thrift
@@ -24,6 +24,7 @@ include "applicationInterfaceModel.thrift"
 namespace java org.apache.airavata.model.messaging.event
 namespace php Airavata.Model.Messaging.Event
 namespace cpp apache.airavata.model.messaging.event
+namespace py apache.airavata.model.messaging.event
 
 const string DEFAULT_ID = "DO_NOT_SET_AT_CLIENTS"
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/workflowAPI.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/workflowAPI.thrift b/airavata-api/thrift-interface-descriptions/workflowAPI.thrift
index 90d5b57..9aa4a88 100644
--- a/airavata-api/thrift-interface-descriptions/workflowAPI.thrift
+++ b/airavata-api/thrift-interface-descriptions/workflowAPI.thrift
@@ -37,10 +37,10 @@ namespace java org.apache.airavata.api.workflow
 namespace php Airavata.API.Workflow
 namespace cpp airavata.api.workflow
 namespace perl AiravataWorkflowAPI
-namespace py airavata.api.workflow
+namespace py apache.airavata.api.workflow
 namespace js AiravataWorkflowAPI
 
-const string AIRAVATA_API_VERSION = "0.13.0"
+const string AIRAVATA_API_VERSION = "0.15.0"
 
 service Workflow {
 

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/workflowDataModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/workflowDataModel.thrift b/airavata-api/thrift-interface-descriptions/workflowDataModel.thrift
index 58ba75b..68d60cd 100644
--- a/airavata-api/thrift-interface-descriptions/workflowDataModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/workflowDataModel.thrift
@@ -21,6 +21,8 @@
 
 namespace java org.apache.airavata.model
 namespace php Airavata.Model
+namespace py apache.airavata.model.workflow
+
 include "applicationInterfaceModel.thrift"
 
 /*

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/thrift-interface-descriptions/workspaceModel.thrift
----------------------------------------------------------------------
diff --git a/airavata-api/thrift-interface-descriptions/workspaceModel.thrift b/airavata-api/thrift-interface-descriptions/workspaceModel.thrift
index fd75985..803cea8 100644
--- a/airavata-api/thrift-interface-descriptions/workspaceModel.thrift
+++ b/airavata-api/thrift-interface-descriptions/workspaceModel.thrift
@@ -23,6 +23,7 @@ include "experimentModel.thrift"
 namespace java org.apache.airavata.model.workspace
 namespace php Airavata.Model.Workspace
 namespace cpp apache.airavata.model.workspace
+namespace py apache.airavata.model.workspace
 
 /*
  * This file describes the definitions of the Airavata Workspace. The workspace is a container for all user data


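Taken together, the namespace additions above put every generated Python module under the apache.airavata.* package tree, mirroring the existing cpp namespaces. A small hedged sketch of what that looks like from client code, using the workspace Project struct that the createProject API below expects; the module path and the placeholder id are assumptions, not confirmed by this commit:

    # Illustrative sketch only -- not part of this commit.
    from apache.airavata.model.workspace.ttypes import Project   # assumed module path

    project = Project(projectID='DO_NOT_SET_AT_CLIENTS',   # assumed placeholder; the server
                      owner='testuser',                    # generates the real project id
                      name='Test Project',
                      description='Scratch project for the Python SDK')
    project.validate()   # generated validate() requires projectID, owner and name
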
[7/8] airavata git commit: Adding python generated code - AIRAVATA-1642

Posted by sm...@apache.org.
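The generated Airavata.py added below contains the service Iface declaration shown in this diff, plus the matching Client and Processor classes the Thrift compiler emits alongside it. For orientation, a minimal, hedged client sketch; the apache.airavata.api package path, the localhost host and the 8930 port are assumptions, not taken from this commit:

    # Illustrative sketch only -- not part of this commit (Python 2, matching the generated code).
    from thrift.transport import TSocket, TTransport
    from thrift.protocol import TBinaryProtocol
    from apache.airavata.api import Airavata        # assumed package path for Airavata.py

    transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 8930))
    client = Airavata.Client(TBinaryProtocol.TBinaryProtocol(transport))

    transport.open()
    try:
        print 'Airavata API version:', client.getAPIVersion()
        for gateway in client.getAllGateways():
            print gateway.gatewayId, gateway.gatewayName
    finally:
        transport.close()
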
http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata.py
new file mode 100644
index 0000000..ded8f77
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata.py
@@ -0,0 +1,29153 @@
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+from thrift.Thrift import TType, TMessageType, TException, TApplicationException
+from ttypes import *
+from thrift.Thrift import TProcessor
+from thrift.transport import TTransport
+from thrift.protocol import TBinaryProtocol, TProtocol
+try:
+  from thrift.protocol import fastbinary
+except:
+  fastbinary = None
+
+
+class Iface:
+  def getAPIVersion(self):
+    """
+    Fetch Apache Airavata API version
+    """
+    pass
+
+  def addGateway(self, gateway):
+    """
+    Parameters:
+     - gateway
+    """
+    pass
+
+  def updateGateway(self, gatewayId, updatedGateway):
+    """
+    Parameters:
+     - gatewayId
+     - updatedGateway
+    """
+    pass
+
+  def getGateway(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def deleteGateway(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def getAllGateways(self):
+    pass
+
+  def isGatewayExist(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def generateAndRegisterSSHKeys(self, gatewayId, userName):
+    """
+    Generate and Register SSH Key Pair with Airavata Credential Store.
+
+    @param gatewayId
+       The identifier for the requested gateway.
+
+    @param userName
+       The User for which the credential should be registered. For community accounts, this user is the community
+       user name. For computational resources, this user name need not be the same as the user name on the resource.
+
+    @return airavataCredStoreToken
+      An SSH Key pair is generated and stored in the credential store and associated with users or community account
+      belonging to a gateway.
+
+
+
+    Parameters:
+     - gatewayId
+     - userName
+    """
+    pass
+
+  def getSSHPubKey(self, airavataCredStoreToken):
+    """
+    Parameters:
+     - airavataCredStoreToken
+    """
+    pass
+
+  def getAllUserSSHPubKeys(self, userName):
+    """
+    Parameters:
+     - userName
+    """
+    pass
+
+  def createProject(self, gatewayId, project):
+    """
+    Creates a Project with basic metadata.
+       A Project is a container of experiments.
+
+    @param gatewayId
+       The identifier for the requested gateway.
+
+    @param Project
+       The Project Object described in the workspaceModel
+
+
+    Parameters:
+     - gatewayId
+     - project
+    """
+    pass
+
+  def updateProject(self, projectId, updatedProject):
+    """
+    Update a Project
+
+
+    Parameters:
+     - projectId
+     - updatedProject
+    """
+    pass
+
+  def getProject(self, projectId):
+    """
+    Get a Project by ID
+
+
+    Parameters:
+     - projectId
+    """
+    pass
+
+  def deleteProject(self, projectId):
+    """
+    Parameters:
+     - projectId
+    """
+    pass
+
+  def getAllUserProjects(self, gatewayId, userName):
+    """
+    Get all Projects for a user
+
+    @param gatewayId
+       The identifier for the requested gateway.
+
+    @param userName
+       The user name of the user whose Projects are requested.
+
+    Parameters:
+     - gatewayId
+     - userName
+    """
+    pass
+
+  def searchProjectsByProjectName(self, gatewayId, userName, projectName):
+    """
+    Get all Projects for a user by project name
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - projectName
+    """
+    pass
+
+  def searchProjectsByProjectDesc(self, gatewayId, userName, description):
+    """
+    Get all Projects for a user by project description
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - description
+    """
+    pass
+
+  def searchExperimentsByName(self, gatewayId, userName, expName):
+    """
+    Search Experiments by experiment name
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - expName
+    """
+    pass
+
+  def searchExperimentsByDesc(self, gatewayId, userName, description):
+    """
+    Search Experiments by experiment description
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - description
+    """
+    pass
+
+  def searchExperimentsByApplication(self, gatewayId, userName, applicationId):
+    """
+    Search Experiments by application id
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - applicationId
+    """
+    pass
+
+  def searchExperimentsByStatus(self, gatewayId, userName, experimentState):
+    """
+    Search Experiments by experiment status
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - experimentState
+    """
+    pass
+
+  def searchExperimentsByCreationTime(self, gatewayId, userName, fromTime, toTime):
+    """
+    Search Experiments by experiment creation time
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - fromTime
+     - toTime
+    """
+    pass
+
+  def getAllExperimentsInProject(self, projectId):
+    """
+    Get all Experiments within a Project
+
+
+    Parameters:
+     - projectId
+    """
+    pass
+
+  def getAllUserExperiments(self, gatewayId, userName):
+    """
+    Get all Experiments by user
+
+
+    Parameters:
+     - gatewayId
+     - userName
+    """
+    pass
+
+  def createExperiment(self, gatewayId, experiment):
+    """
+    Create an experiment for the specified user belonging to the gateway. The gateway identity is not explicitly passed
+      but inferred from the authentication header. This experiment is just a persistent place holder. The client
+      has to subsequently configure and launch the created experiment. No action is taken on Airavata Server except
+      registering the experiment in a persistent store.
+
+    @param basicExperimentMetadata
+       The create experiment will require the basic experiment metadata like the name and description, intended user,
+         the gateway identifier and if the experiment should be shared publicly by default. During the creation of an experiment
+         the ExperimentMetadata is a required field.
+
+    @return
+      The server-side generated airavata experiment globally unique identifier.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
+
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a place holder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
+          rather an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - gatewayId
+     - experiment
+    """
+    pass
+
+  def getExperiment(self, airavataExperimentId):
+    """
+    Fetch previously created experiment metadata.
+
+    @param airavataExperimentId
+       The identifier for the requested experiment. This is returned during the create experiment step.
+
+    @return experimentMetadata
+      This method will return the previously stored experiment metadata.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.ExperimentNotFoundException
+       If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
+         
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a place holder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
+          rather an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def updateExperiment(self, airavataExperimentId, experiment):
+    """
+    Configure a previously created experiment with required inputs, scheduling and other quality of service
+      parameters. This method only updates the experiment object within the registry. The experiment has to be launched
+      to make it actionable by the server.
+
+    @param airavataExperimentId
+       The identifier for the requested experiment. This is returned during the create experiment step.
+
+    @param experimentConfigurationData
+       The configuration information of the experiment with application input parameters, computational resource scheduling
+         information, special input output handling and additional quality of service parameters.
+
+    @return
+      This method call does not have a return value.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.ExperimentNotFoundException
+       If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
+         
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a place holder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
+          rather an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - airavataExperimentId
+     - experiment
+    """
+    pass
+
+  def updateExperimentConfiguration(self, airavataExperimentId, userConfiguration):
+    """
+    Parameters:
+     - airavataExperimentId
+     - userConfiguration
+    """
+    pass
+
+  def updateResourceScheduleing(self, airavataExperimentId, resourceScheduling):
+    """
+    Parameters:
+     - airavataExperimentId
+     - resourceScheduling
+    """
+    pass
+
+  def validateExperiment(self, airavataExperimentId):
+    """
+    Validate experiment configuration. A true result in general indicates the experiment is ready to be launched.
+
+    @param experimentID
+    @return success/failure
+
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def launchExperiment(self, airavataExperimentId, airavataCredStoreToken):
+    """
+    Launch a previously created and configured experiment. Airavata Server will then start processing the request and appropriate
+      notifications and intermediate and output data will be subsequently available for this experiment.
+
+    @param airavataExperimentId
+       The identifier for the requested experiment. This is returned during the create experiment step.
+
+    @param airavataCredStoreToken:
+      A requirement to execute experiments within Airavata is to first register the targeted remote computational account
+        credentials with Airavata Credential Store. The administrative API (related to credential store) will return a
+        generated token associated with the registered credentials. The client has to securely possess this token id and is
+        required to pass it to Airavata Server for all execution requests.
+      Note: At this point only the credential store token is required so the string is directly passed here. In future, if
+        more security credentials are enabled, then the structure ExecutionSecurityParameters should be used.
+      Note: This parameter is not persisted within Airavata Registry for security reasons.
+
+    @return
+      This method call does not have a return value.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.ExperimentNotFoundException
+       If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
+         
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a place holder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
+          rather an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - airavataExperimentId
+     - airavataCredStoreToken
+    """
+    pass
+
+  def getExperimentStatus(self, airavataExperimentId):
+    """
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def getExperimentOutputs(self, airavataExperimentId):
+    """
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def getIntermediateOutputs(self, airavataExperimentId):
+    """
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def getJobStatuses(self, airavataExperimentId):
+    """
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def getJobDetails(self, airavataExperimentId):
+    """
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def getDataTransferDetails(self, airavataExperimentId):
+    """
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def cloneExperiment(self, existingExperimentID, newExperimentName):
+    """
+    Clone a specified experiment with a new name. A copy of the experiment configuration is made and is persisted with new metadata.
+      The client has to subsequently update this configuration if needed and launch the cloned experiment.
+
+    @param newExperimentName
+       experiment name that should be used in the cloned experiment
+
+    @param updatedExperiment
+       Once an experiment is cloned, to disambiguate, the users are suggested to provide new metadata. This will again require
+         the basic experiment metadata like the name and description, intended user, the gateway identifier and if the experiment
+         should be shared publicly by default.
+
+    @return
+      The server-side generated airavata experiment globally unique identifier for the newly cloned experiment.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.ExperimentNotFoundException
+       If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
+         
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a place holder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
+          rather an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - existingExperimentID
+     - newExperimentName
+    """
+    pass
+
+  def terminateExperiment(self, airavataExperimentId):
+    """
+    Terminate a running experiment.
+
+    @param airavataExperimentId
+       The identifier for the requested experiment. This is returned during the create experiment step.
+
+    @return
+      This method call does not have a return value.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.ExperimentNotFoundException
+       If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following list of exceptions are thrown which Airavata Client can take corrective actions to resolve:
+         
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a place holder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server side issues and if the problem cannot be corrected by the client
+          rather an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - airavataExperimentId
+    """
+    pass
+
+  def registerApplicationModule(self, gatewayId, applicationModule):
+    """
+    Register an Application Module.
+
+    @param applicationModule
+       Application Module Object created from the datamodel.
+
+    @return appModuleId
+      Returns a server-side generated airavata appModule globally unique identifier.
+
+
+    Parameters:
+     - gatewayId
+     - applicationModule
+    """
+    pass
+
+  def getApplicationModule(self, appModuleId):
+    """
+    Fetch an Application Module.
+
+    @param appModuleId
+      The identifier for the requested application module
+
+    @return applicationModule
+      Returns an application Module Object.
+
+
+    Parameters:
+     - appModuleId
+    """
+    pass
+
+  def updateApplicationModule(self, appModuleId, applicationModule):
+    """
+    Update an Application Module.
+
+    @param appModuleId
+      The identifier for the requested application module to be updated.
+
+    @param applicationModule
+       Application Module Object created from the datamodel.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - appModuleId
+     - applicationModule
+    """
+    pass
+
+  def getAllAppModules(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def deleteApplicationModule(self, appModuleId):
+    """
+    Delete an Application Module.
+
+    @param appModuleId
+      The identifier for the requested application module to be deleted.
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+    Parameters:
+     - appModuleId
+    """
+    pass
+
+  def registerApplicationDeployment(self, gatewayId, applicationDeployment):
+    """
+    Register an Application Deployment.
+
+    @param applicationDeployment
+       Application Deployment Object created from the datamodel.
+
+    @return appDeploymentId
+      Returns a server-side generated airavata appDeployment globally unique identifier.
+
+
+    Parameters:
+     - gatewayId
+     - applicationDeployment
+    """
+    pass
+
+  def getApplicationDeployment(self, appDeploymentId):
+    """
+    Fetch an Application Deployment.
+
+    @param appDeploymentId
+      The identifier for the requested application deployment
+
+    @return applicationDeployment
+      Returns an application Deployment Object.
+
+
+    Parameters:
+     - appDeploymentId
+    """
+    pass
+
+  def updateApplicationDeployment(self, appDeploymentId, applicationDeployment):
+    """
+    Update an Application Deployment.
+
+    @param appDeploymentId
+      The identifier for the requested application deployment to be updated.
+
+    @param appDeployment
+       Application Deployment Object created from the datamodel.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - appDeploymentId
+     - applicationDeployment
+    """
+    pass
+
+  def deleteApplicationDeployment(self, appDeploymentId):
+    """
+    Delete an Application Deployment.
+
+    @param appDeploymentId
+      The identifier for the requested application deployment to be deleted.
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+    Parameters:
+     - appDeploymentId
+    """
+    pass
+
+  def getAllApplicationDeployments(self, gatewayId):
+    """
+    Fetch all Application Deployment Descriptions.
+
+    @return list<applicationDeployment>
+      Returns the list of all application Deployment Objects.
+
+
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def getAppModuleDeployedResources(self, appModuleId):
+    """
+    Fetch a list of Deployed Compute Hosts.
+
+    @param appModuleId
+      The identifier for the requested application module
+
+    @return list<string>
+      Returns a list of Deployed Resources.
+
+
+    Parameters:
+     - appModuleId
+    """
+    pass
+
+  def registerApplicationInterface(self, gatewayId, applicationInterface):
+    """
+    Register an Application Interface.
+
+    @param applicationInterface
+       Application Interface Object created from the datamodel.
+
+    @return appInterfaceId
+      Returns a server-side generated airavata application interface globally unique identifier.
+
+
+    Parameters:
+     - gatewayId
+     - applicationInterface
+    """
+    pass
+
+  def getApplicationInterface(self, appInterfaceId):
+    """
+    Fetch an Application Interface.
+
+    @param appInterfaceId
+      The identifier for the requested application interface
+
+    @return applicationInterface
+      Returns an application Interface Object.
+
+
+
+    Parameters:
+     - appInterfaceId
+    """
+    pass
+
+  def updateApplicationInterface(self, appInterfaceId, applicationInterface):
+    """
+    Update an Application Interface.
+
+    @param appInterfaceId
+      The identifier for the requested application deployment to be updated.
+
+    @param appInterface
+       Application Interface Object created from the datamodel.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+
+    Parameters:
+     - appInterfaceId
+     - applicationInterface
+    """
+    pass
+
+  def deleteApplicationInterface(self, appInterfaceId):
+    """
+    Delete an Application Interface.
+
+    @param appInterfaceId
+      The identifier for the requested application interface to be deleted.
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+
+    Parameters:
+     - appInterfaceId
+    """
+    pass
+
+  def getAllApplicationInterfaceNames(self, gatewayId):
+    """
+    Fetch name and id of Application Interface documents.
+
+
+    @return map<applicationId, applicationInterfaceNames>
+      Returns a list of application interfaces with corresponding ids
+
+
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def getAllApplicationInterfaces(self, gatewayId):
+    """
+    Fetch all Application Interface documents.
+
+
+    @return map<applicationId, applicationInterfaceNames>
+      Returns a list of application interface documents
+
+
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def getApplicationInputs(self, appInterfaceId):
+    """
+    Fetch the list of Application Inputs.
+
+    @param appInterfaceId
+      The identifier for the requested application interface
+
+    @return list<applicationInterfaceModel.InputDataObjectType>
+      Returns a list of application inputs.
+
+
+    Parameters:
+     - appInterfaceId
+    """
+    pass
+
+  def getApplicationOutputs(self, appInterfaceId):
+    """
+    Fetch the list of Application Outputs.
+
+    @param appInterfaceId
+      The identifier for the requested application interface
+
+    @return list<applicationInterfaceModel.OutputDataObjectType>
+      Returns a list of application outputs.
+
+
+    Parameters:
+     - appInterfaceId
+    """
+    pass
+
+  def getAvailableAppInterfaceComputeResources(self, appInterfaceId):
+    """
+    Fetch a list of all deployed Compute Hosts for a given application interface.
+
+    @param appInterfaceId
+      The identifier for the requested application interface
+
+    @return map<computeResourceId, computeResourceName>
+      A map of registered compute resource id's and their corresponding hostnames.
+       Deployments of each modules listed within the interfaces will be listed.
+
+
+    Parameters:
+     - appInterfaceId
+    """
+    pass
+
+  def registerComputeResource(self, computeResourceDescription):
+    """
+    Register a Compute Resource.
+
+    @param computeResourceDescription
+       Compute Resource Object created from the datamodel.
+
+    @return computeResourceId
+      Returns a server-side generated airavata compute resource globally unique identifier.
+
+
+    Parameters:
+     - computeResourceDescription
+    """
+    pass
+
+  def getComputeResource(self, computeResourceId):
+    """
+    Fetch the given Compute Resource.
+
+    @param computeResourceId
+      The identifier for the requested compute resource
+
+    @return computeResourceDescription
+       Compute Resource Object created from the datamodel..
+
+
+    Parameters:
+     - computeResourceId
+    """
+    pass
+
+  def getAllComputeResourceNames(self):
+    """
+    Fetch all registered Compute Resources.
+
+    @return A map of registered compute resource id's and their corresponding hostnames.
+       Compute Resource Object created from the datamodel..
+
+    """
+    pass
+
+  def updateComputeResource(self, computeResourceId, computeResourceDescription):
+    """
+    Update a Compute Resource.
+
+    @param computeResourceId
+      The identifier for the requested compute resource to be updated.
+
+    @param computeResourceDescription
+       Compute Resource Object created from the datamodel.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - computeResourceId
+     - computeResourceDescription
+    """
+    pass
+
+  def deleteComputeResource(self, computeResourceId):
+    """
+    Delete a Compute Resource.
+
+    @param computeResourceId
+      The identifier for the requested compute resource to be deleted.
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+    Parameters:
+     - computeResourceId
+    """
+    pass
+
+  def addLocalSubmissionDetails(self, computeResourceId, priorityOrder, localSubmission):
+    """
+    Add a Local Job Submission details to a compute resource
+     App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
+
+    @param computeResourceId
+      The identifier of the compute resource to which JobSubmission protocol to be added
+
+    @param priorityOrder
+      Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
+
+    @param localSubmission
+      The LOCALSubmission object to be added to the resource.
+
+    @return status
+      Returns the unique job submission id.
+
+
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - localSubmission
+    """
+    pass
+
+  def updateLocalSubmissionDetails(self, jobSubmissionInterfaceId, localSubmission):
+    """
+    Update the given Local Job Submission details
+
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be updated.
+
+    @param localSubmission
+      The LOCALSubmission object to be updated.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - jobSubmissionInterfaceId
+     - localSubmission
+    """
+    pass
+
+  def getLocalJobSubmission(self, jobSubmissionId):
+    """
+    This method returns localJobSubmission object
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be retrieved.
+     @return LOCALSubmission instance
+
+
+    Parameters:
+     - jobSubmissionId
+    """
+    pass
+
+  def addSSHJobSubmissionDetails(self, computeResourceId, priorityOrder, sshJobSubmission):
+    """
+    Add a SSH Job Submission details to a compute resource
+     App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
+
+    @param computeResourceId
+      The identifier of the compute resource to which JobSubmission protocol to be added
+
+    @param priorityOrder
+      Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
+
+    @param sshJobSubmission
+      The SSHJobSubmission object to be added to the resource.
+
+    @return status
+      Returns the unique job submission id.
+
+
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - sshJobSubmission
+    """
+    pass
+
+  def getSSHJobSubmission(self, jobSubmissionId):
+    """
+    This method returns SSHJobSubmission object
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be retrieved.
+     @return SSHJobSubmission instance
+
+
+    Parameters:
+     - jobSubmissionId
+    """
+    pass
+
+  def addUNICOREJobSubmissionDetails(self, computeResourceId, priorityOrder, unicoreJobSubmission):
+    """
+    Add a UNICORE Job Submission details to a compute resource
+     App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
+
+    @param computeResourceId
+      The identifier of the compute resource to which JobSubmission protocol to be added
+
+    @param priorityOrder
+      Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
+
+    @param unicoreJobSubmission
+      The UnicoreJobSubmission object to be added to the resource.
+
+    @return status
+     Returns the unique job submission id.
+
+
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - unicoreJobSubmission
+    """
+    pass
+
+  def getUnicoreJobSubmission(self, jobSubmissionId):
+    """
+    This method returns UnicoreJobSubmission object
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be retrieved.
+     @return UnicoreJobSubmission instance
+
+    Parameters:
+     - jobSubmissionId
+    """
+    pass
+
+  def addCloudJobSubmissionDetails(self, computeResourceId, priorityOrder, cloudSubmission):
+    """
+    Add a Cloud Job Submission details to a compute resource
+     App catalog will return a jobSubmissionInterfaceId which will be added to the jobSubmissionInterfaces.
+
+    @param computeResourceId
+      The identifier of the compute resource to which JobSubmission protocol to be added
+
+    @param priorityOrder
+      Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
+
+    @param cloudSubmission
+      The CloudJobSubmission object to be added to the resource.
+
+    @return status
+      Returns the unique job submission id.
+
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - cloudSubmission
+    """
+    pass
+
+  def getCloudJobSubmission(self, jobSubmissionId):
+    """
+    This method returns CloudJobSubmission object
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be retrieved.
+     @return CloudJobSubmission instance
+
+    Parameters:
+     - jobSubmissionId
+    """
+    pass
+
+  def updateSSHJobSubmissionDetails(self, jobSubmissionInterfaceId, sshJobSubmission):
+    """
+    Update the given SSH Job Submission details
+
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be updated.
+
+    @param sshJobSubmission
+      The SSHJobSubmission object to be updated.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - jobSubmissionInterfaceId
+     - sshJobSubmission
+    """
+    pass
+
+  def updateCloudJobSubmissionDetails(self, jobSubmissionInterfaceId, sshJobSubmission):
+    """
+    Update the given Cloud Job Submission details
+
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be updated.
+
+    @param cloudJobSubmission
+      The CloudJobSubmission object to be updated.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - jobSubmissionInterfaceId
+     - sshJobSubmission
+    """
+    pass
+
+  def updateUnicoreJobSubmissionDetails(self, jobSubmissionInterfaceId, unicoreJobSubmission):
+    """
+    Parameters:
+     - jobSubmissionInterfaceId
+     - unicoreJobSubmission
+    """
+    pass
+
+  def addLocalDataMovementDetails(self, computeResourceId, priorityOrder, localDataMovement):
+    """
+    Add a Local data movement details to a compute resource
+     App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
+
+    @param computeResourceId
+      The identifier of the compute resource to which JobSubmission protocol to be added
+
+    @param priorityOrder
+      Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
+
+    @param localDataMovement
+      The LOCALDataMovement object to be added to the resource.
+
+    @return status
+      Returns the unique job submission id.
+
+
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - localDataMovement
+    """
+    pass
+
+  def updateLocalDataMovementDetails(self, dataMovementInterfaceId, localDataMovement):
+    """
+    Update the given Local data movement details
+
+    @param dataMovementInterfaceId
+      The identifier of the data movement Interface to be updated.
+
+    @param localDataMovement
+      The LOCALDataMovement object to be updated.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - dataMovementInterfaceId
+     - localDataMovement
+    """
+    pass
+
+  def getLocalDataMovement(self, dataMovementId):
+    """
+    This method returns local datamovement object
+    @param dataMovementId
+      The identifier of the datamovement Interface to be retrieved.
+     @return LOCALDataMovement instance
+
+    Parameters:
+     - dataMovementId
+    """
+    pass
+
+  def addSCPDataMovementDetails(self, computeResourceId, priorityOrder, scpDataMovement):
+    """
+    Add a SCP data movement details to a compute resource
+     App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
+
+    @param computeResourceId
+      The identifier of the compute resource to which JobSubmission protocol to be added
+
+    @param priorityOrder
+      Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
+
+    @param scpDataMovement
+      The SCPDataMovement object to be added to the resource.
+
+    @return status
+      Returns the unique job submission id.
+
+
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - scpDataMovement
+    """
+    pass
+
+  def updateSCPDataMovementDetails(self, dataMovementInterfaceId, scpDataMovement):
+    """
+    Update the given scp data movement details
+     App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
+
+    @param dataMovementInterfaceId
+      The identifier of the data movement Interface to be updated.
+
+    @param scpDataMovement
+      The SCPDataMovement object to be updated.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - dataMovementInterfaceId
+     - scpDataMovement
+    """
+    pass
+
+  def getSCPDataMovement(self, dataMovementId):
+    """
+    This method returns SCP datamovement object
+    @param dataMovementId
+      The identifier of the datamovement Interface to be retrieved.
+     @return SCPDataMovement instance
+
+    Parameters:
+     - dataMovementId
+    """
+    pass
+
+  def addUnicoreDataMovementDetails(self, computeResourceId, priorityOrder, unicoreDataMovement):
+    """
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - unicoreDataMovement
+    """
+    pass
+
+  def updateUnicoreDataMovementDetails(self, dataMovementInterfaceId, unicoreDataMovement):
+    """
+    Parameters:
+     - dataMovementInterfaceId
+     - unicoreDataMovement
+    """
+    pass
+
+  def getUnicoreDataMovement(self, dataMovementId):
+    """
+    Parameters:
+     - dataMovementId
+    """
+    pass
+
+  def addGridFTPDataMovementDetails(self, computeResourceId, priorityOrder, gridFTPDataMovement):
+    """
+    Add a GridFTP data movement details to a compute resource
+     App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
+
+    @param computeResourceId
+      The identifier of the compute resource to which JobSubmission protocol to be added
+
+    @param priorityOrder
+      Specify the priority of this job manager. If this is the only jobmanager, the priority can be zero.
+
+    @param gridFTPDataMovement
+      The GridFTPDataMovement object to be added to the resource.
+
+    @return status
+      Returns the unique job submission id.
+
+
+    Parameters:
+     - computeResourceId
+     - priorityOrder
+     - gridFTPDataMovement
+    """
+    pass
+
+  def updateGridFTPDataMovementDetails(self, dataMovementInterfaceId, gridFTPDataMovement):
+    """
+    Update the given GridFTP data movement details to a compute resource
+     App catalog will return a dataMovementInterfaceId which will be added to the dataMovementInterfaces.
+
+    @param dataMovementInterfaceId
+      The identifier of the data movement Interface to be updated.
+
+    @param gridFTPDataMovement
+      The GridFTPDataMovement object to be updated.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - dataMovementInterfaceId
+     - gridFTPDataMovement
+    """
+    pass
+
+  def getGridFTPDataMovement(self, dataMovementId):
+    """
+    This method returns GridFTP datamovement object
+    @param dataMovementId
+      The identifier of the datamovement Interface to be retrieved.
+     @return GridFTPDataMovement instance
+
+    Parameters:
+     - dataMovementId
+    """
+    pass
+
+  def changeJobSubmissionPriority(self, jobSubmissionInterfaceId, newPriorityOrder):
+    """
+    Change the priority of a given job submission interface
+
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be changed
+
+    @param newPriorityOrder
+      The new priority of the job submission interface.
+
+    @return status
+      Returns a success/failure of the change.
+
+
+    Parameters:
+     - jobSubmissionInterfaceId
+     - newPriorityOrder
+    """
+    pass
+
+  def changeDataMovementPriority(self, dataMovementInterfaceId, newPriorityOrder):
+    """
+    Change the priority of a given data movement interface
+
+    @param dataMovementInterfaceId
+      The identifier of the DataMovement Interface to be changed
+
+    @param newPriorityOrder
+      The new priority of the data movement interface.
+
+    @return status
+      Returns a success/failure of the change.
+
+
+    Parameters:
+     - dataMovementInterfaceId
+     - newPriorityOrder
+    """
+    pass
+
+  def changeJobSubmissionPriorities(self, jobSubmissionPriorityMap):
+    """
+    Change the priorities of a given set of job submission interfaces
+
+    @param jobSubmissionPriorityMap
+      A Map of identifiers of the JobSubmission Interfaces and their associated priorities to be set.
+
+    @return status
+      Returns a success/failure of the changes.
+
+
+    Parameters:
+     - jobSubmissionPriorityMap
+    """
+    pass
+
+  def changeDataMovementPriorities(self, dataMovementPriorityMap):
+    """
+    Change the priorities of a given set of data movement interfaces
+
+    @param dataMovementPriorityMap
+      A Map of identifiers of the DataMovement Interfaces and their associated priorities to be set.
+
+    @return status
+      Returns a success/failure of the changes.
+
+
+    Parameters:
+     - dataMovementPriorityMap
+    """
+    pass
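+
+  # Illustrative sketch, not part of the generated stub: the two priority-map
+  # operations above take a plain dict mapping interface ids to integer priorities.
+  # The interface ids below are hypothetical, and `client` is assumed to be a
+  # connected Airavata.Client instance (see the Client class later in this module):
+  #
+  #   client.changeJobSubmissionPriorities({'ssh-submission-id': 0, 'local-submission-id': 1})
+  #   client.changeDataMovementPriorities({'scp-movement-id': 0, 'gridftp-movement-id': 1})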
+
+  def deleteJobSubmissionInterface(self, computeResourceId, jobSubmissionInterfaceId):
+    """
+    Delete a given job submission interface
+
+    @param jobSubmissionInterfaceId
+      The identifier of the JobSubmission Interface to be changed
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+    Parameters:
+     - computeResourceId
+     - jobSubmissionInterfaceId
+    """
+    pass
+
+  def deleteDataMovementInterface(self, computeResourceId, dataMovementInterfaceId):
+    """
+    Delete a given data movement interface
+
+    @param dataMovementInterfaceId
+      The identifier of the DataMovement Interface to be changed
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+    Parameters:
+     - computeResourceId
+     - dataMovementInterfaceId
+    """
+    pass
+
+  def registerResourceJobManager(self, resourceJobManager):
+    """
+    Parameters:
+     - resourceJobManager
+    """
+    pass
+
+  def updateResourceJobManager(self, resourceJobManagerId, updatedResourceJobManager):
+    """
+    Parameters:
+     - resourceJobManagerId
+     - updatedResourceJobManager
+    """
+    pass
+
+  def getResourceJobManager(self, resourceJobManagerId):
+    """
+    Parameters:
+     - resourceJobManagerId
+    """
+    pass
+
+  def deleteResourceJobManager(self, resourceJobManagerId):
+    """
+    Parameters:
+     - resourceJobManagerId
+    """
+    pass
+
+  def deleteBatchQueue(self, computeResourceId, queueName):
+    """
+    Parameters:
+     - computeResourceId
+     - queueName
+    """
+    pass
+
+  def registerGatewayResourceProfile(self, gatewayResourceProfile):
+    """
+    Register a Gateway Resource Profile.
+
+    @param gatewayResourceProfile
+       Gateway Resource Profile Object.
+       The GatewayID should be obtained from Airavata gateway registration and passed to register a corresponding
+         resource profile.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - gatewayResourceProfile
+    """
+    pass
+
+  def getGatewayResourceProfile(self, gatewayID):
+    """
+    Fetch the given Gateway Resource Profile.
+
+    @param gatewayID
+      The identifier for the requested gateway resource
+
+    @return gatewayResourceProfile
+       Gateway Resource Profile Object.
+
+
+    Parameters:
+     - gatewayID
+    """
+    pass
+
+  def updateGatewayResourceProfile(self, gatewayID, gatewayResourceProfile):
+    """
+    Update a Gateway Resource Profile.
+
+    @param gatewayID
+      The identifier for the requested gateway resource to be updated.
+
+    @param gatewayResourceProfile
+       Gateway Resource Profile Object.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - gatewayID
+     - gatewayResourceProfile
+    """
+    pass
+
+  def deleteGatewayResourceProfile(self, gatewayID):
+    """
+    Delete the given Gateway Resource Profile.
+
+    @param gatewayID
+      The identifier for the requested gateway resource to be deleted.
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+    Parameters:
+     - gatewayID
+    """
+    pass
+
+  def addGatewayComputeResourcePreference(self, gatewayID, computeResourceId, computeResourcePreference):
+    """
+    Add a Compute Resource Preference to a registered gateway profile.
+
+    @param gatewayID
+      The identifier for the gateway profile to be added.
+
+    @param computeResourceId
+      Preferences related to a particular compute resource
+
+    @param computeResourcePreference
+      The ComputeResourcePreference object to be added to the resource profile.
+
+    @return status
+      Returns a success/failure of the addition. If a profile already exists, this operation will fail.
+       An update should be used instead.
+
+
+    Parameters:
+     - gatewayID
+     - computeResourceId
+     - computeResourcePreference
+    """
+    pass
+
+  def getGatewayComputeResourcePreference(self, gatewayID, computeResourceId):
+    """
+    Fetch a Compute Resource Preference of a registered gateway profile.
+
+    @param gatewayID
+      The identifier for the gateway profile to be requested
+
+    @param computeResourceId
+      Preferences related to a particular compute resource
+
+    @return computeResourcePreference
+      Returns the ComputeResourcePreference object.
+
+
+    Parameters:
+     - gatewayID
+     - computeResourceId
+    """
+    pass
+
+  def getAllGatewayComputeResourcePreferences(self, gatewayID):
+    """
+    Fetch all Compute Resource Preferences of a registered gateway profile.
+
+    @param gatewayID
+      The identifier for the gateway profile to be requested
+
+    @return computeResourcePreference
+      Returns a list of ComputeResourcePreference objects.
+
+
+    Parameters:
+     - gatewayID
+    """
+    pass
+
+  def getAllGatewayComputeResources(self):
+    """
+    Fetch all registered gateway profiles.
+
+    """
+    pass
+
+  def updateGatewayComputeResourcePreference(self, gatewayID, computeResourceId, computeResourcePreference):
+    """
+    Update a Compute Resource Preference to a registered gateway profile.
+
+    @param gatewayID
+      The identifier for the gateway profile to be updated.
+
+    @param computeResourceId
+      Preferences related to a particular compute resource
+
+    @param computeResourcePreference
+      The ComputeResourcePreference object to be updated to the resource profile.
+
+    @return status
+      Returns a success/failure of the update.
+
+
+    Parameters:
+     - gatewayID
+     - computeResourceId
+     - computeResourcePreference
+    """
+    pass
+
+  def deleteGatewayComputeResourcePreference(self, gatewayID, computeResourceId):
+    """
+    Delete the Compute Resource Preference of a registered gateway profile.
+
+    @param gatewayID
+      The identifier for the gateway profile to be deleted.
+
+    @param computeResourceId
+      Preferences related to a particular compute resource
+
+    @return status
+      Returns a success/failure of the deletion.
+
+
+    Parameters:
+     - gatewayID
+     - computeResourceId
+    """
+    pass
+
+  def getAllWorkflows(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    pass
+
+  def getWorkflow(self, workflowTemplateId):
+    """
+    Parameters:
+     - workflowTemplateId
+    """
+    pass
+
+  def deleteWorkflow(self, workflowTemplateId):
+    """
+    Parameters:
+     - workflowTemplateId
+    """
+    pass
+
+  def registerWorkflow(self, gatewayId, workflow):
+    """
+    Parameters:
+     - gatewayId
+     - workflow
+    """
+    pass
+
+  def updateWorkflow(self, workflowTemplateId, workflow):
+    """
+    Parameters:
+     - workflowTemplateId
+     - workflow
+    """
+    pass
+
+  def getWorkflowTemplateId(self, workflowName):
+    """
+    Parameters:
+     - workflowName
+    """
+    pass
+
+  def isWorkflowExistWithName(self, workflowName):
+    """
+    Parameters:
+     - workflowName
+    """
+    pass
+
+
+class Client(Iface):
+  def __init__(self, iprot, oprot=None):
+    self._iprot = self._oprot = iprot
+    if oprot is not None:
+      self._oprot = oprot
+    self._seqid = 0
+
+  def getAPIVersion(self):
+    """
+    Fetch Apache Airavata API version
+    """
+    self.send_getAPIVersion()
+    return self.recv_getAPIVersion()
+
+  def send_getAPIVersion(self):
+    self._oprot.writeMessageBegin('getAPIVersion', TMessageType.CALL, self._seqid)
+    args = getAPIVersion_args()
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getAPIVersion(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getAPIVersion_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getAPIVersion failed: unknown result");
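+
+  # Illustrative sketch, not part of the generated stub: constructing this Client
+  # over a Thrift socket transport and calling getAPIVersion. The host and port are
+  # assumptions; adjust them to the actual Airavata API server:
+  #
+  #   from thrift.transport import TSocket, TTransport
+  #   from thrift.protocol import TBinaryProtocol
+  #   from apache.airavata.api import Airavata
+  #
+  #   transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 8930))
+  #   protocol = TBinaryProtocol.TBinaryProtocol(transport)
+  #   client = Airavata.Client(protocol)
+  #   transport.open()
+  #   print client.getAPIVersion()
+  #   transport.close()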
+
+  def addGateway(self, gateway):
+    """
+    Parameters:
+     - gateway
+    """
+    self.send_addGateway(gateway)
+    return self.recv_addGateway()
+
+  def send_addGateway(self, gateway):
+    self._oprot.writeMessageBegin('addGateway', TMessageType.CALL, self._seqid)
+    args = addGateway_args()
+    args.gateway = gateway
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_addGateway(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = addGateway_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "addGateway failed: unknown result");
+
+  def updateGateway(self, gatewayId, updatedGateway):
+    """
+    Parameters:
+     - gatewayId
+     - updatedGateway
+    """
+    self.send_updateGateway(gatewayId, updatedGateway)
+    self.recv_updateGateway()
+
+  def send_updateGateway(self, gatewayId, updatedGateway):
+    self._oprot.writeMessageBegin('updateGateway', TMessageType.CALL, self._seqid)
+    args = updateGateway_args()
+    args.gatewayId = gatewayId
+    args.updatedGateway = updatedGateway
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_updateGateway(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = updateGateway_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    return
+
+  def getGateway(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    self.send_getGateway(gatewayId)
+    return self.recv_getGateway()
+
+  def send_getGateway(self, gatewayId):
+    self._oprot.writeMessageBegin('getGateway', TMessageType.CALL, self._seqid)
+    args = getGateway_args()
+    args.gatewayId = gatewayId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getGateway(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getGateway_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getGateway failed: unknown result");
+
+  def deleteGateway(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    self.send_deleteGateway(gatewayId)
+    return self.recv_deleteGateway()
+
+  def send_deleteGateway(self, gatewayId):
+    self._oprot.writeMessageBegin('deleteGateway', TMessageType.CALL, self._seqid)
+    args = deleteGateway_args()
+    args.gatewayId = gatewayId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_deleteGateway(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = deleteGateway_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "deleteGateway failed: unknown result");
+
+  def getAllGateways(self):
+    self.send_getAllGateways()
+    return self.recv_getAllGateways()
+
+  def send_getAllGateways(self):
+    self._oprot.writeMessageBegin('getAllGateways', TMessageType.CALL, self._seqid)
+    args = getAllGateways_args()
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getAllGateways(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getAllGateways_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getAllGateways failed: unknown result");
+
+  def isGatewayExist(self, gatewayId):
+    """
+    Parameters:
+     - gatewayId
+    """
+    self.send_isGatewayExist(gatewayId)
+    return self.recv_isGatewayExist()
+
+  def send_isGatewayExist(self, gatewayId):
+    self._oprot.writeMessageBegin('isGatewayExist', TMessageType.CALL, self._seqid)
+    args = isGatewayExist_args()
+    args.gatewayId = gatewayId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_isGatewayExist(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = isGatewayExist_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "isGatewayExist failed: unknown result");
+
+  def generateAndRegisterSSHKeys(self, gatewayId, userName):
+    """
+    Generate and Register SSH Key Pair with Airavata Credential Store.
+
+    @param gatewayId
+       The identifier for the requested gateway.
+
+    @param userName
+       The user for which the credential should be registered. For community accounts, this is the
+       community user name. For computational resources, this user name need not be the same as the user name on the resources.
+
+    @return airavataCredStoreToken
+      An SSH Key pair is generated and stored in the credential store and associated with users or community account
+      belonging to a gateway.
+
+
+
+    Parameters:
+     - gatewayId
+     - userName
+    """
+    self.send_generateAndRegisterSSHKeys(gatewayId, userName)
+    return self.recv_generateAndRegisterSSHKeys()
+
+  def send_generateAndRegisterSSHKeys(self, gatewayId, userName):
+    self._oprot.writeMessageBegin('generateAndRegisterSSHKeys', TMessageType.CALL, self._seqid)
+    args = generateAndRegisterSSHKeys_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_generateAndRegisterSSHKeys(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = generateAndRegisterSSHKeys_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "generateAndRegisterSSHKeys failed: unknown result");
+
+  def getSSHPubKey(self, airavataCredStoreToken):
+    """
+    Parameters:
+     - airavataCredStoreToken
+    """
+    self.send_getSSHPubKey(airavataCredStoreToken)
+    return self.recv_getSSHPubKey()
+
+  def send_getSSHPubKey(self, airavataCredStoreToken):
+    self._oprot.writeMessageBegin('getSSHPubKey', TMessageType.CALL, self._seqid)
+    args = getSSHPubKey_args()
+    args.airavataCredStoreToken = airavataCredStoreToken
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getSSHPubKey(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getSSHPubKey_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getSSHPubKey failed: unknown result");
+
+  def getAllUserSSHPubKeys(self, userName):
+    """
+    Parameters:
+     - userName
+    """
+    self.send_getAllUserSSHPubKeys(userName)
+    return self.recv_getAllUserSSHPubKeys()
+
+  def send_getAllUserSSHPubKeys(self, userName):
+    self._oprot.writeMessageBegin('getAllUserSSHPubKeys', TMessageType.CALL, self._seqid)
+    args = getAllUserSSHPubKeys_args()
+    args.userName = userName
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getAllUserSSHPubKeys(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getAllUserSSHPubKeys_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getAllUserSSHPubKeys failed: unknown result");
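+
+  # Illustrative sketch, not part of the generated stub: typical use of the
+  # credential-store calls above, with a connected `client` and hypothetical
+  # gateway/user names:
+  #
+  #   token = client.generateAndRegisterSSHKeys('example-gateway', 'example-user')
+  #   public_key = client.getSSHPubKey(token)
+  #   all_keys = client.getAllUserSSHPubKeys('example-user')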
+
+  def createProject(self, gatewayId, project):
+    """
+    Creates a Project with basic metadata.
+       A Project is a container of experiments.
+
+    @param gatewayId
+       The identifier for the requested gateway.
+
+    @param project
+       The Project Object described in the workspaceModel
+
+
+    Parameters:
+     - gatewayId
+     - project
+    """
+    self.send_createProject(gatewayId, project)
+    return self.recv_createProject()
+
+  def send_createProject(self, gatewayId, project):
+    self._oprot.writeMessageBegin('createProject', TMessageType.CALL, self._seqid)
+    args = createProject_args()
+    args.gatewayId = gatewayId
+    args.project = project
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_createProject(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = createProject_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "createProject failed: unknown result");
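+
+  # Illustrative sketch, not part of the generated stub: creating a Project. The
+  # owner/name/description fields on the workspace Project model are assumptions
+  # and should be checked against the generated ttypes:
+  #
+  #   from apache.airavata.model.workspace.ttypes import Project
+  #   project = Project(owner='example-user', name='Example Project',
+  #                     description='Created through the Python SDK')
+  #   project_id = client.createProject('example-gateway', project)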
+
+  def updateProject(self, projectId, updatedProject):
+    """
+    Update a Project
+
+
+    Parameters:
+     - projectId
+     - updatedProject
+    """
+    self.send_updateProject(projectId, updatedProject)
+    self.recv_updateProject()
+
+  def send_updateProject(self, projectId, updatedProject):
+    self._oprot.writeMessageBegin('updateProject', TMessageType.CALL, self._seqid)
+    args = updateProject_args()
+    args.projectId = projectId
+    args.updatedProject = updatedProject
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_updateProject(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = updateProject_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    if result.pnfe is not None:
+      raise result.pnfe
+    return
+
+  def getProject(self, projectId):
+    """
+    Get a Project by ID
+
+
+    Parameters:
+     - projectId
+    """
+    self.send_getProject(projectId)
+    return self.recv_getProject()
+
+  def send_getProject(self, projectId):
+    self._oprot.writeMessageBegin('getProject', TMessageType.CALL, self._seqid)
+    args = getProject_args()
+    args.projectId = projectId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getProject(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getProject_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    if result.pnfe is not None:
+      raise result.pnfe
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getProject failed: unknown result");
+
+  def deleteProject(self, projectId):
+    """
+    Parameters:
+     - projectId
+    """
+    self.send_deleteProject(projectId)
+    return self.recv_deleteProject()
+
+  def send_deleteProject(self, projectId):
+    self._oprot.writeMessageBegin('deleteProject', TMessageType.CALL, self._seqid)
+    args = deleteProject_args()
+    args.projectId = projectId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_deleteProject(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = deleteProject_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    if result.pnfe is not None:
+      raise result.pnfe
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "deleteProject failed: unknown result");
+
+  def getAllUserProjects(self, gatewayId, userName):
+    """
+    Get all Projects for a user.
+
+    @param gatewayId
+      The identifier for the requested gateway.
+
+    @param userName
+      The identifier of the user whose projects are requested.
+
+    Parameters:
+     - gatewayId
+     - userName
+    """
+    self.send_getAllUserProjects(gatewayId, userName)
+    return self.recv_getAllUserProjects()
+
+  def send_getAllUserProjects(self, gatewayId, userName):
+    self._oprot.writeMessageBegin('getAllUserProjects', TMessageType.CALL, self._seqid)
+    args = getAllUserProjects_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getAllUserProjects(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getAllUserProjects_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getAllUserProjects failed: unknown result");
+
+  def searchProjectsByProjectName(self, gatewayId, userName, projectName):
+    """
+    Get all Projects for a user by project name
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - projectName
+    """
+    self.send_searchProjectsByProjectName(gatewayId, userName, projectName)
+    return self.recv_searchProjectsByProjectName()
+
+  def send_searchProjectsByProjectName(self, gatewayId, userName, projectName):
+    self._oprot.writeMessageBegin('searchProjectsByProjectName', TMessageType.CALL, self._seqid)
+    args = searchProjectsByProjectName_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.projectName = projectName
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_searchProjectsByProjectName(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = searchProjectsByProjectName_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "searchProjectsByProjectName failed: unknown result");
+
+  def searchProjectsByProjectDesc(self, gatewayId, userName, description):
+    """
+    Get all Projects for a user by project description
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - description
+    """
+    self.send_searchProjectsByProjectDesc(gatewayId, userName, description)
+    return self.recv_searchProjectsByProjectDesc()
+
+  def send_searchProjectsByProjectDesc(self, gatewayId, userName, description):
+    self._oprot.writeMessageBegin('searchProjectsByProjectDesc', TMessageType.CALL, self._seqid)
+    args = searchProjectsByProjectDesc_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.description = description
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_searchProjectsByProjectDesc(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = searchProjectsByProjectDesc_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "searchProjectsByProjectDesc failed: unknown result");
+
+  def searchExperimentsByName(self, gatewayId, userName, expName):
+    """
+    Search Experiments by experiment name
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - expName
+    """
+    self.send_searchExperimentsByName(gatewayId, userName, expName)
+    return self.recv_searchExperimentsByName()
+
+  def send_searchExperimentsByName(self, gatewayId, userName, expName):
+    self._oprot.writeMessageBegin('searchExperimentsByName', TMessageType.CALL, self._seqid)
+    args = searchExperimentsByName_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.expName = expName
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_searchExperimentsByName(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = searchExperimentsByName_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "searchExperimentsByName failed: unknown result");
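+
+  # Illustrative sketch, not part of the generated stub: searching experiments with
+  # a connected `client` and hypothetical gateway/user names:
+  #
+  #   summaries = client.searchExperimentsByName('example-gateway', 'example-user', 'MD run')
+  #   for summary in summaries:
+  #     print summary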
+
+  def searchExperimentsByDesc(self, gatewayId, userName, description):
+    """
+    Search Experiments by experiment description
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - description
+    """
+    self.send_searchExperimentsByDesc(gatewayId, userName, description)
+    return self.recv_searchExperimentsByDesc()
+
+  def send_searchExperimentsByDesc(self, gatewayId, userName, description):
+    self._oprot.writeMessageBegin('searchExperimentsByDesc', TMessageType.CALL, self._seqid)
+    args = searchExperimentsByDesc_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.description = description
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_searchExperimentsByDesc(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = searchExperimentsByDesc_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "searchExperimentsByDesc failed: unknown result");
+
+  def searchExperimentsByApplication(self, gatewayId, userName, applicationId):
+    """
+    Search Experiments by application id
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - applicationId
+    """
+    self.send_searchExperimentsByApplication(gatewayId, userName, applicationId)
+    return self.recv_searchExperimentsByApplication()
+
+  def send_searchExperimentsByApplication(self, gatewayId, userName, applicationId):
+    self._oprot.writeMessageBegin('searchExperimentsByApplication', TMessageType.CALL, self._seqid)
+    args = searchExperimentsByApplication_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.applicationId = applicationId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_searchExperimentsByApplication(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = searchExperimentsByApplication_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "searchExperimentsByApplication failed: unknown result");
+
+  def searchExperimentsByStatus(self, gatewayId, userName, experimentState):
+    """
+    Search Experiments by experiment status
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - experimentState
+    """
+    self.send_searchExperimentsByStatus(gatewayId, userName, experimentState)
+    return self.recv_searchExperimentsByStatus()
+
+  def send_searchExperimentsByStatus(self, gatewayId, userName, experimentState):
+    self._oprot.writeMessageBegin('searchExperimentsByStatus', TMessageType.CALL, self._seqid)
+    args = searchExperimentsByStatus_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.experimentState = experimentState
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_searchExperimentsByStatus(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = searchExperimentsByStatus_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "searchExperimentsByStatus failed: unknown result");
+
+  def searchExperimentsByCreationTime(self, gatewayId, userName, fromTime, toTime):
+    """
+    Search Experiments by experiment creation time
+
+
+    Parameters:
+     - gatewayId
+     - userName
+     - fromTime
+     - toTime
+    """
+    self.send_searchExperimentsByCreationTime(gatewayId, userName, fromTime, toTime)
+    return self.recv_searchExperimentsByCreationTime()
+
+  def send_searchExperimentsByCreationTime(self, gatewayId, userName, fromTime, toTime):
+    self._oprot.writeMessageBegin('searchExperimentsByCreationTime', TMessageType.CALL, self._seqid)
+    args = searchExperimentsByCreationTime_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.fromTime = fromTime
+    args.toTime = toTime
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_searchExperimentsByCreationTime(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = searchExperimentsByCreationTime_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "searchExperimentsByCreationTime failed: unknown result");
+
+  def getAllExperimentsInProject(self, projectId):
+    """
+    Get all Experiments within a Project
+
+
+    Parameters:
+     - projectId
+    """
+    self.send_getAllExperimentsInProject(projectId)
+    return self.recv_getAllExperimentsInProject()
+
+  def send_getAllExperimentsInProject(self, projectId):
+    self._oprot.writeMessageBegin('getAllExperimentsInProject', TMessageType.CALL, self._seqid)
+    args = getAllExperimentsInProject_args()
+    args.projectId = projectId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getAllExperimentsInProject(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getAllExperimentsInProject_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    if result.pnfe is not None:
+      raise result.pnfe
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getAllExperimentsInProject failed: unknown result");
+
+  def getAllUserExperiments(self, gatewayId, userName):
+    """
+    Get all Experiments by user
+
+
+    Parameters:
+     - gatewayId
+     - userName
+    """
+    self.send_getAllUserExperiments(gatewayId, userName)
+    return self.recv_getAllUserExperiments()
+
+  def send_getAllUserExperiments(self, gatewayId, userName):
+    self._oprot.writeMessageBegin('getAllUserExperiments', TMessageType.CALL, self._seqid)
+    args = getAllUserExperiments_args()
+    args.gatewayId = gatewayId
+    args.userName = userName
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getAllUserExperiments(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getAllUserExperiments_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "getAllUserExperiments failed: unknown result");
+
+  def createExperiment(self, gatewayId, experiment):
+    """
+    Create an experiment for the specified user belonging to the gateway. The gateway identity is not explicitly passed
+      but inferred from the authentication header. This experiment is just a persistent placeholder. The client
+      has to subsequently configure and launch the created experiment. No action is taken on the Airavata Server except
+      registering the experiment in a persistent store.
+
+    @param experiment
+       Creating an experiment requires basic experiment metadata such as the name, description, intended user,
+         the gateway identifier, and whether the experiment should be shared publicly by default. During the creation of an
+         experiment the ExperimentMetadata is a required field.
+
+    @return
+      The server-side generated airavata experiment globally unique identifier.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following exceptions may be thrown, for which the Airavata Client can take corrective action:
+
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a placeholder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server-side issue. If the problem cannot be corrected by the client,
+          an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - gatewayId
+     - experiment
+    """
+    self.send_createExperiment(gatewayId, experiment)
+    return self.recv_createExperiment()
+
+  def send_createExperiment(self, gatewayId, experiment):
+    self._oprot.writeMessageBegin('createExperiment', TMessageType.CALL, self._seqid)
+    args = createExperiment_args()
+    args.gatewayId = gatewayId
+    args.experiment = experiment
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_createExperiment(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = createExperiment_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+      return result.success
+    if result.ire is not None:
+      raise result.ire
+    if result.ace is not None:
+      raise result.ace
+    if result.ase is not None:
+      raise result.ase
+    raise TApplicationException(TApplicationException.MISSING_RESULT, "createExperiment failed: unknown result");
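+
+  # Illustrative sketch, not part of the generated stub: creating an experiment. The
+  # field names on the experiment model are assumptions and should be checked against
+  # the generated ttypes:
+  #
+  #   from apache.airavata.model.workspace.experiment.ttypes import Experiment
+  #   experiment = Experiment(projectID=project_id, userName='example-user',
+  #                           name='Example Experiment', applicationId='example-app-interface-id')
+  #   experiment_id = client.createExperiment('example-gateway', experiment)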
+
+  def getExperiment(self, airavataExperimentId):
+    """
+    Fetch previously created experiment metadata.
+
+    @param airavataExperimentId
+       The identifier for the requested experiment. This is returned during the create experiment step.
+
+    @return experimentMetadata
+      This method will return the previously stored experiment metadata.
+
+    @throws org.apache.airavata.model.error.InvalidRequestException
+       For any incorrect forming of the request itself.
+
+    @throws org.apache.airavata.model.error.ExperimentNotFoundException
+       If the specified experiment is not previously created, then an Experiment Not Found Exception is thrown.
+
+    @throws org.apache.airavata.model.error.AiravataClientException
+       The following exceptions may be thrown, for which the Airavata Client can take corrective action:
+
+         UNKNOWN_GATEWAY_ID - If a Gateway is not registered with Airavata as a one time administrative
+            step, then Airavata Registry will not have a provenance area setup. The client has to follow
+            gateway registration steps and retry this request.
+
+         AUTHENTICATION_FAILURE - How Authentication will be implemented is yet to be determined.
+            For now this is a placeholder.
+
+         INVALID_AUTHORIZATION - This will throw an authorization exception. When a more robust security hand-shake
+            is implemented, the authorization will be more substantial.
+
+    @throws org.apache.airavata.model.error.AiravataSystemException
+       This exception will be thrown for any Airavata Server-side issue. If the problem cannot be corrected by the client,
+          an Airavata Administrator will be notified to take corrective action.
+
+
+    Parameters:
+     - airavataExperimentId
+    """
+    self.send_getExperiment(airavataExperimentId)
+    return self.recv_getExperiment()
+
+  def send_getExperiment(self, airavataExperimentId):
+    self._oprot.writeMessageBegin('getExperiment', TMessageType.CALL, self._seqid)
+    args = getExperiment_args()
+    args.airavataExperimentId = airavataExperimentId
+    args.write(self._oprot)
+    self._oprot.writeMessageEnd()
+    self._oprot.trans.flush()
+
+  def recv_getExperiment(self):
+    (fname, mtype, rseqid) = self._iprot.readMessageBegin()
+    if mtype == TMessageType.EXCEPTION:
+      x = TApplicationException()
+      x.read(self._iprot)
+      self._iprot.readMessageEnd()
+      raise x
+    result = getExperiment_result()
+    result.read(self._iprot)
+    self._iprot.readMessageEnd()
+    if result.success is not None:
+ 

<TRUNCATED>
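
For readers who want to exercise the generated client above, the following is a minimal, self-contained sketch of a client session. The host and port are assumptions (adjust them to the actual Airavata API server), and the generated apache.airavata.api package is assumed to be on the Python path:

from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol

from apache.airavata.api import Airavata

# Open a buffered binary-protocol connection to the (assumed) API server endpoint.
transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 8930))
protocol = TBinaryProtocol.TBinaryProtocol(transport)
client = Airavata.Client(protocol)

transport.open()
try:
    # Any of the service methods shown in the diff above can be called on `client`.
    print 'API version:', client.getAPIVersion()
finally:
    transport.close()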

[8/8] airavata git commit: Adding python generated code - AIRAVATA-1642

Posted by sm...@apache.org.
Adding python generated code - AIRAVATA-1642


Project: http://git-wip-us.apache.org/repos/asf/airavata/repo
Commit: http://git-wip-us.apache.org/repos/asf/airavata/commit/97c7a736
Tree: http://git-wip-us.apache.org/repos/asf/airavata/tree/97c7a736
Diff: http://git-wip-us.apache.org/repos/asf/airavata/diff/97c7a736

Branch: refs/heads/master
Commit: 97c7a736236bd8275e2ea5c1fc9877ece2e4a550
Parents: c090519
Author: Suresh Marru <sm...@apache.org>
Authored: Tue Mar 24 05:19:03 2015 -0400
Committer: Suresh Marru <sm...@apache.org>
Committed: Tue Mar 24 05:19:03 2015 -0400

----------------------------------------------------------------------
 .../airavata-python-sdk/pom.xml                 |   114 +
 .../src/main/resources/lib/__init__.py          |     0
 .../src/main/resources/lib/apache/__init__.py   |     0
 .../resources/lib/apache/airavata/__init__.py   |     0
 .../lib/apache/airavata/api/Airavata-remote     |   893 +
 .../lib/apache/airavata/api/Airavata.py         | 29153 +++++++++++++++++
 .../lib/apache/airavata/api/__init__.py         |     1 +
 .../lib/apache/airavata/api/constants.py        |    12 +
 .../lib/apache/airavata/api/error/__init__.py   |     1 +
 .../lib/apache/airavata/api/error/constants.py  |    11 +
 .../lib/apache/airavata/api/error/ttypes.py     |   881 +
 .../resources/lib/apache/airavata/api/ttypes.py |    28 +
 .../lib/apache/airavata/model/__init__.py       |     1 +
 .../airavata/model/appcatalog/__init__.py       |     0
 .../model/appcatalog/appdeployment/__init__.py  |     1 +
 .../model/appcatalog/appdeployment/constants.py |    12 +
 .../model/appcatalog/appdeployment/ttypes.py    |   530 +
 .../model/appcatalog/appinterface/__init__.py   |     1 +
 .../model/appcatalog/appinterface/constants.py  |    12 +
 .../model/appcatalog/appinterface/ttypes.py     |   600 +
 .../appcatalog/computeresource/__init__.py      |     1 +
 .../appcatalog/computeresource/constants.py     |    12 +
 .../model/appcatalog/computeresource/ttypes.py  |  1967 ++
 .../model/appcatalog/gatewayprofile/__init__.py |     1 +
 .../appcatalog/gatewayprofile/constants.py      |    11 +
 .../model/appcatalog/gatewayprofile/ttypes.py   |   290 +
 .../lib/apache/airavata/model/constants.py      |    11 +
 .../apache/airavata/model/messaging/__init__.py |     0
 .../airavata/model/messaging/event/__init__.py  |     1 +
 .../airavata/model/messaging/event/constants.py |    12 +
 .../airavata/model/messaging/event/ttypes.py    |  1234 +
 .../lib/apache/airavata/model/ttypes.py         |    22 +
 .../apache/airavata/model/workflow/__init__.py  |     1 +
 .../apache/airavata/model/workflow/constants.py |    12 +
 .../apache/airavata/model/workflow/ttypes.py    |   162 +
 .../apache/airavata/model/workspace/__init__.py |     1 +
 .../airavata/model/workspace/constants.py       |    11 +
 .../model/workspace/experiment/__init__.py      |     1 +
 .../model/workspace/experiment/constants.py     |    14 +
 .../model/workspace/experiment/ttypes.py        |  3042 ++
 .../apache/airavata/model/workspace/ttypes.py   |   429 +
 airavata-api/airavata-client-sdks/pom.xml       |     5 +-
 .../airavataDataModel.thrift                    |     1 +
 .../airavataErrors.thrift                       |     2 +-
 .../applicationDeploymentModel.thrift           |     1 +
 .../applicationInterfaceModel.thrift            |     1 +
 .../computeResourceModel.thrift                 |     1 +
 .../experimentModel.thrift                      |     1 +
 .../gatewayResourceProfileModel.thrift          |     2 +
 .../messagingEvents.thrift                      |     1 +
 .../workflowAPI.thrift                          |     4 +-
 .../workflowDataModel.thrift                    |     2 +
 .../workspaceModel.thrift                       |     1 +
 53 files changed, 39503 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/pom.xml
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/pom.xml b/airavata-api/airavata-client-sdks/airavata-python-sdk/pom.xml
new file mode 100644
index 0000000..9f5c1ae
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/pom.xml
@@ -0,0 +1,114 @@
+<?xml version="1.0" encoding="UTF-8"?>
+
+<!--Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file 
+    distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under 
+    the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may 
+    obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to 
+    in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF 
+    ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under 
+    the License. -->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+    <parent>
+        <groupId>org.apache.airavata</groupId>
+        <artifactId>airavata-client-sdks</artifactId>
+        <version>0.15-SNAPSHOT</version>
+        <relativePath>../pom.xml</relativePath>
+    </parent>
+
+    <modelVersion>4.0.0</modelVersion>
+    <artifactId>apache-airavata-client-python-sdk</artifactId>
+    <name>Airavata Client Python SDK</name>
+    <packaging>pom</packaging>
+    <url>http://airavata.apache.org/</url>
+
+    <build>
+        <plugins>
+	    <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-dependency-plugin</artifactId>
+                <version>2.8</version>
+                <executions>
+                    <execution>
+                        <id>unpack</id>
+                        <phase>compile</phase>
+                        <goals>
+                            <goal>unpack</goal>
+                        </goals>
+                        <configuration>
+                            <artifactItems>
+                                <artifactItem>
+                                    <groupId>org.apache.airavata</groupId>
+                                    <artifactId>airavata-client-configuration</artifactId>
+                                    <version>${project.version}</version>
+                                    <type>jar</type>
+                                </artifactItem>
+                            </artifactItems>
+                            <outputDirectory>${project.build.directory}/conf</outputDirectory>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-assembly-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>distribution-package</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>single</goal>
+                        </goals>
+                        <configuration>
+                            <finalName>${archive.name}-${project.version}</finalName>
+                            <descriptors>
+                                <descriptor>src/main/assembly/bin-assembly.xml</descriptor>
+                            </descriptors>
+                            <attach>false</attach>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <version>1.7</version>
+                <executions>
+                    <execution>
+                        <id>attach-artifacts</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>attach-artifact</goal>
+                        </goals>
+                        <configuration>
+                            <artifacts>
+                                <artifact>
+                                    <file>${airavata.client-bin.zip}</file>
+                                    <type>zip</type>
+                                    <classifier>bin</classifier>
+                                </artifact>
+                                <artifact>
+                                    <file>${airavata.client-bin.tar.gz}</file>
+                                    <type>tar.gz</type>
+                                    <classifier>bin</classifier>
+                                </artifact>
+                            </artifacts>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+    
+    <properties>
+        <jersey.version>1.13</jersey.version>
+        <grizzly.version>2.0.0-M3</grizzly.version>
+        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
+        <archive.name>apache-airavata-client-python-sdk</archive.name>
+        <used.axis2.release>${axis2.version}</used.axis2.release>
+        <airavata.client-dist.name>${archive.name}-${project.version}</airavata.client-dist.name>
+        <airavata.client-bin.zip>${project.build.directory}/${airavata.client-dist.name}-bin.zip</airavata.client-bin.zip>
+        <airavata.client-bin.tar.gz>${project.build.directory}/${airavata.client-dist.name}-bin.tar.gz</airavata.client-bin.tar.gz>
+    </properties>
+</project>

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/__init__.py
new file mode 100644
index 0000000..e69de29

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/__init__.py
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/__init__.py b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/__init__.py
new file mode 100644
index 0000000..e69de29
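
These empty __init__.py files are what make lib/, lib/apache/, and lib/apache/airavata/ importable as Python packages; without them the generated apache.airavata.* modules cannot be resolved. A minimal sketch, assuming the SDK's lib directory from a local checkout has been added to the path (the path below is a placeholder, not part of the commit):

    import sys
    # Assumption: path to the SDK's lib directory in a local checkout
    sys.path.append('/path/to/airavata-python-sdk/src/main/resources/lib')

    # The chain of empty __init__.py files lets this import resolve
    from apache.airavata.api import Airavata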

http://git-wip-us.apache.org/repos/asf/airavata/blob/97c7a736/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata-remote
----------------------------------------------------------------------
diff --git a/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata-remote b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata-remote
new file mode 100755
index 0000000..f297497
--- /dev/null
+++ b/airavata-api/airavata-client-sdks/airavata-python-sdk/src/main/resources/lib/apache/airavata/api/Airavata-remote
@@ -0,0 +1,893 @@
+#!/usr/bin/env python
+#
+# Autogenerated by Thrift Compiler (0.9.1)
+#
+# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
+#
+#  options string: py
+#
+
+import sys
+import pprint
+from urlparse import urlparse
+from thrift.transport import TTransport
+from thrift.transport import TSocket
+from thrift.transport import THttpClient
+from thrift.protocol import TBinaryProtocol
+
+from apache.airavata.api import Airavata
+from apache.airavata.api.ttypes import *
+
+if len(sys.argv) <= 1 or sys.argv[1] == '--help':
+  print ''
+  print 'Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] function [arg1 [arg2...]]'
+  print ''
+  print 'Functions:'
+  print '  string getAPIVersion()'
+  print '  string addGateway(Gateway gateway)'
+  print '  void updateGateway(string gatewayId, Gateway updatedGateway)'
+  print '  Gateway getGateway(string gatewayId)'
+  print '  bool deleteGateway(string gatewayId)'
+  print '   getAllGateways()'
+  print '  bool isGatewayExist(string gatewayId)'
+  print '  string generateAndRegisterSSHKeys(string gatewayId, string userName)'
+  print '  string getSSHPubKey(string airavataCredStoreToken)'
+  print '   getAllUserSSHPubKeys(string userName)'
+  print '  string createProject(string gatewayId, Project project)'
+  print '  void updateProject(string projectId, Project updatedProject)'
+  print '  Project getProject(string projectId)'
+  print '  bool deleteProject(string projectId)'
+  print '   getAllUserProjects(string gatewayId, string userName)'
+  print '   searchProjectsByProjectName(string gatewayId, string userName, string projectName)'
+  print '   searchProjectsByProjectDesc(string gatewayId, string userName, string description)'
+  print '   searchExperimentsByName(string gatewayId, string userName, string expName)'
+  print '   searchExperimentsByDesc(string gatewayId, string userName, string description)'
+  print '   searchExperimentsByApplication(string gatewayId, string userName, string applicationId)'
+  print '   searchExperimentsByStatus(string gatewayId, string userName, ExperimentState experimentState)'
+  print '   searchExperimentsByCreationTime(string gatewayId, string userName, i64 fromTime, i64 toTime)'
+  print '   getAllExperimentsInProject(string projectId)'
+  print '   getAllUserExperiments(string gatewayId, string userName)'
+  print '  string createExperiment(string gatewayId, Experiment experiment)'
+  print '  Experiment getExperiment(string airavataExperimentId)'
+  print '  void updateExperiment(string airavataExperimentId, Experiment experiment)'
+  print '  void updateExperimentConfiguration(string airavataExperimentId, UserConfigurationData userConfiguration)'
+  print '  void updateResourceScheduleing(string airavataExperimentId, ComputationalResourceScheduling resourceScheduling)'
+  print '  bool validateExperiment(string airavataExperimentId)'
+  print '  void launchExperiment(string airavataExperimentId, string airavataCredStoreToken)'
+  print '  ExperimentStatus getExperimentStatus(string airavataExperimentId)'
+  print '   getExperimentOutputs(string airavataExperimentId)'
+  print '   getIntermediateOutputs(string airavataExperimentId)'
+  print '   getJobStatuses(string airavataExperimentId)'
+  print '   getJobDetails(string airavataExperimentId)'
+  print '   getDataTransferDetails(string airavataExperimentId)'
+  print '  string cloneExperiment(string existingExperimentID, string newExperimentName)'
+  print '  void terminateExperiment(string airavataExperimentId)'
+  print '  string registerApplicationModule(string gatewayId, ApplicationModule applicationModule)'
+  print '  ApplicationModule getApplicationModule(string appModuleId)'
+  print '  bool updateApplicationModule(string appModuleId, ApplicationModule applicationModule)'
+  print '   getAllAppModules(string gatewayId)'
+  print '  bool deleteApplicationModule(string appModuleId)'
+  print '  string registerApplicationDeployment(string gatewayId, ApplicationDeploymentDescription applicationDeployment)'
+  print '  ApplicationDeploymentDescription getApplicationDeployment(string appDeploymentId)'
+  print '  bool updateApplicationDeployment(string appDeploymentId, ApplicationDeploymentDescription applicationDeployment)'
+  print '  bool deleteApplicationDeployment(string appDeploymentId)'
+  print '   getAllApplicationDeployments(string gatewayId)'
+  print '   getAppModuleDeployedResources(string appModuleId)'
+  print '  string registerApplicationInterface(string gatewayId, ApplicationInterfaceDescription applicationInterface)'
+  print '  ApplicationInterfaceDescription getApplicationInterface(string appInterfaceId)'
+  print '  bool updateApplicationInterface(string appInterfaceId, ApplicationInterfaceDescription applicationInterface)'
+  print '  bool deleteApplicationInterface(string appInterfaceId)'
+  print '   getAllApplicationInterfaceNames(string gatewayId)'
+  print '   getAllApplicationInterfaces(string gatewayId)'
+  print '   getApplicationInputs(string appInterfaceId)'
+  print '   getApplicationOutputs(string appInterfaceId)'
+  print '   getAvailableAppInterfaceComputeResources(string appInterfaceId)'
+  print '  string registerComputeResource(ComputeResourceDescription computeResourceDescription)'
+  print '  ComputeResourceDescription getComputeResource(string computeResourceId)'
+  print '   getAllComputeResourceNames()'
+  print '  bool updateComputeResource(string computeResourceId, ComputeResourceDescription computeResourceDescription)'
+  print '  bool deleteComputeResource(string computeResourceId)'
+  print '  string addLocalSubmissionDetails(string computeResourceId, i32 priorityOrder, LOCALSubmission localSubmission)'
+  print '  bool updateLocalSubmissionDetails(string jobSubmissionInterfaceId, LOCALSubmission localSubmission)'
+  print '  LOCALSubmission getLocalJobSubmission(string jobSubmissionId)'
+  print '  string addSSHJobSubmissionDetails(string computeResourceId, i32 priorityOrder, SSHJobSubmission sshJobSubmission)'
+  print '  SSHJobSubmission getSSHJobSubmission(string jobSubmissionId)'
+  print '  string addUNICOREJobSubmissionDetails(string computeResourceId, i32 priorityOrder, UnicoreJobSubmission unicoreJobSubmission)'
+  print '  UnicoreJobSubmission getUnicoreJobSubmission(string jobSubmissionId)'
+  print '  string addCloudJobSubmissionDetails(string computeResourceId, i32 priorityOrder, CloudJobSubmission cloudSubmission)'
+  print '  CloudJobSubmission getCloudJobSubmission(string jobSubmissionId)'
+  print '  bool updateSSHJobSubmissionDetails(string jobSubmissionInterfaceId, SSHJobSubmission sshJobSubmission)'
+  print '  bool updateCloudJobSubmissionDetails(string jobSubmissionInterfaceId, CloudJobSubmission sshJobSubmission)'
+  print '  bool updateUnicoreJobSubmissionDetails(string jobSubmissionInterfaceId, UnicoreJobSubmission unicoreJobSubmission)'
+  print '  string addLocalDataMovementDetails(string computeResourceId, i32 priorityOrder, LOCALDataMovement localDataMovement)'
+  print '  bool updateLocalDataMovementDetails(string dataMovementInterfaceId, LOCALDataMovement localDataMovement)'
+  print '  LOCALDataMovement getLocalDataMovement(string dataMovementId)'
+  print '  string addSCPDataMovementDetails(string computeResourceId, i32 priorityOrder, SCPDataMovement scpDataMovement)'
+  print '  bool updateSCPDataMovementDetails(string dataMovementInterfaceId, SCPDataMovement scpDataMovement)'
+  print '  SCPDataMovement getSCPDataMovement(string dataMovementId)'
+  print '  string addUnicoreDataMovementDetails(string computeResourceId, i32 priorityOrder, UnicoreDataMovement unicoreDataMovement)'
+  print '  bool updateUnicoreDataMovementDetails(string dataMovementInterfaceId, UnicoreDataMovement unicoreDataMovement)'
+  print '  UnicoreDataMovement getUnicoreDataMovement(string dataMovementId)'
+  print '  string addGridFTPDataMovementDetails(string computeResourceId, i32 priorityOrder, GridFTPDataMovement gridFTPDataMovement)'
+  print '  bool updateGridFTPDataMovementDetails(string dataMovementInterfaceId, GridFTPDataMovement gridFTPDataMovement)'
+  print '  GridFTPDataMovement getGridFTPDataMovement(string dataMovementId)'
+  print '  bool changeJobSubmissionPriority(string jobSubmissionInterfaceId, i32 newPriorityOrder)'
+  print '  bool changeDataMovementPriority(string dataMovementInterfaceId, i32 newPriorityOrder)'
+  print '  bool changeJobSubmissionPriorities( jobSubmissionPriorityMap)'
+  print '  bool changeDataMovementPriorities( dataMovementPriorityMap)'
+  print '  bool deleteJobSubmissionInterface(string computeResourceId, string jobSubmissionInterfaceId)'
+  print '  bool deleteDataMovementInterface(string computeResourceId, string dataMovementInterfaceId)'
+  print '  string registerResourceJobManager(ResourceJobManager resourceJobManager)'
+  print '  bool updateResourceJobManager(string resourceJobManagerId, ResourceJobManager updatedResourceJobManager)'
+  print '  ResourceJobManager getResourceJobManager(string resourceJobManagerId)'
+  print '  bool deleteResourceJobManager(string resourceJobManagerId)'
+  print '  bool deleteBatchQueue(string computeResourceId, string queueName)'
+  print '  string registerGatewayResourceProfile(GatewayResourceProfile gatewayResourceProfile)'
+  print '  GatewayResourceProfile getGatewayResourceProfile(string gatewayID)'
+  print '  bool updateGatewayResourceProfile(string gatewayID, GatewayResourceProfile gatewayResourceProfile)'
+  print '  bool deleteGatewayResourceProfile(string gatewayID)'
+  print '  bool addGatewayComputeResourcePreference(string gatewayID, string computeResourceId, ComputeResourcePreference computeResourcePreference)'
+  print '  ComputeResourcePreference getGatewayComputeResourcePreference(string gatewayID, string computeResourceId)'
+  print '   getAllGatewayComputeResourcePreferences(string gatewayID)'
+  print '   getAllGatewayComputeResources()'
+  print '  bool updateGatewayComputeResourcePreference(string gatewayID, string computeResourceId, ComputeResourcePreference computeResourcePreference)'
+  print '  bool deleteGatewayComputeResourcePreference(string gatewayID, string computeResourceId)'
+  print '   getAllWorkflows(string gatewayId)'
+  print '  Workflow getWorkflow(string workflowTemplateId)'
+  print '  void deleteWorkflow(string workflowTemplateId)'
+  print '  string registerWorkflow(string gatewayId, Workflow workflow)'
+  print '  void updateWorkflow(string workflowTemplateId, Workflow workflow)'
+  print '  string getWorkflowTemplateId(string workflowName)'
+  print '  bool isWorkflowExistWithName(string workflowName)'
+  print ''
+  sys.exit(0)
+
+pp = pprint.PrettyPrinter(indent = 2)
+host = 'localhost'
+port = 9090
+uri = ''
+framed = False
+http = False
+argi = 1
+
+if sys.argv[argi] == '-h':
+  parts = sys.argv[argi+1].split(':')
+  host = parts[0]
+  if len(parts) > 1:
+    port = int(parts[1])
+  argi += 2
+
+if sys.argv[argi] == '-u':
+  url = urlparse(sys.argv[argi+1])
+  parts = url[1].split(':')
+  host = parts[0]
+  if len(parts) > 1:
+    port = int(parts[1])
+  else:
+    port = 80
+  uri = url[2]
+  if url[4]:
+    uri += '?%s' % url[4]
+  http = True
+  argi += 2
+
+if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
+  framed = True
+  argi += 1
+
+cmd = sys.argv[argi]
+args = sys.argv[argi+1:]
+
+if http:
+  transport = THttpClient.THttpClient(host, port, uri)
+else:
+  socket = TSocket.TSocket(host, port)
+  if framed:
+    transport = TTransport.TFramedTransport(socket)
+  else:
+    transport = TTransport.TBufferedTransport(socket)
+protocol = TBinaryProtocol.TBinaryProtocol(transport)
+client = Airavata.Client(protocol)
+transport.open()
+
+if cmd == 'getAPIVersion':
+  if len(args) != 0:
+    print 'getAPIVersion requires 0 args'
+    sys.exit(1)
+  pp.pprint(client.getAPIVersion())
+
+elif cmd == 'addGateway':
+  if len(args) != 1:
+    print 'addGateway requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.addGateway(eval(args[0]),))
+
+elif cmd == 'updateGateway':
+  if len(args) != 2:
+    print 'updateGateway requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateGateway(args[0],eval(args[1]),))
+
+elif cmd == 'getGateway':
+  if len(args) != 1:
+    print 'getGateway requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getGateway(args[0],))
+
+elif cmd == 'deleteGateway':
+  if len(args) != 1:
+    print 'deleteGateway requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteGateway(args[0],))
+
+elif cmd == 'getAllGateways':
+  if len(args) != 0:
+    print 'getAllGateways requires 0 args'
+    sys.exit(1)
+  pp.pprint(client.getAllGateways())
+
+elif cmd == 'isGatewayExist':
+  if len(args) != 1:
+    print 'isGatewayExist requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.isGatewayExist(args[0],))
+
+elif cmd == 'generateAndRegisterSSHKeys':
+  if len(args) != 2:
+    print 'generateAndRegisterSSHKeys requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.generateAndRegisterSSHKeys(args[0],args[1],))
+
+elif cmd == 'getSSHPubKey':
+  if len(args) != 1:
+    print 'getSSHPubKey requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getSSHPubKey(args[0],))
+
+elif cmd == 'getAllUserSSHPubKeys':
+  if len(args) != 1:
+    print 'getAllUserSSHPubKeys requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllUserSSHPubKeys(args[0],))
+
+elif cmd == 'createProject':
+  if len(args) != 2:
+    print 'createProject requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.createProject(args[0],eval(args[1]),))
+
+elif cmd == 'updateProject':
+  if len(args) != 2:
+    print 'updateProject requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateProject(args[0],eval(args[1]),))
+
+elif cmd == 'getProject':
+  if len(args) != 1:
+    print 'getProject requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getProject(args[0],))
+
+elif cmd == 'deleteProject':
+  if len(args) != 1:
+    print 'deleteProject requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteProject(args[0],))
+
+elif cmd == 'getAllUserProjects':
+  if len(args) != 2:
+    print 'getAllUserProjects requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.getAllUserProjects(args[0],args[1],))
+
+elif cmd == 'searchProjectsByProjectName':
+  if len(args) != 3:
+    print 'searchProjectsByProjectName requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.searchProjectsByProjectName(args[0],args[1],args[2],))
+
+elif cmd == 'searchProjectsByProjectDesc':
+  if len(args) != 3:
+    print 'searchProjectsByProjectDesc requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.searchProjectsByProjectDesc(args[0],args[1],args[2],))
+
+elif cmd == 'searchExperimentsByName':
+  if len(args) != 3:
+    print 'searchExperimentsByName requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.searchExperimentsByName(args[0],args[1],args[2],))
+
+elif cmd == 'searchExperimentsByDesc':
+  if len(args) != 3:
+    print 'searchExperimentsByDesc requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.searchExperimentsByDesc(args[0],args[1],args[2],))
+
+elif cmd == 'searchExperimentsByApplication':
+  if len(args) != 3:
+    print 'searchExperimentsByApplication requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.searchExperimentsByApplication(args[0],args[1],args[2],))
+
+elif cmd == 'searchExperimentsByStatus':
+  if len(args) != 3:
+    print 'searchExperimentsByStatus requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.searchExperimentsByStatus(args[0],args[1],eval(args[2]),))
+
+elif cmd == 'searchExperimentsByCreationTime':
+  if len(args) != 4:
+    print 'searchExperimentsByCreationTime requires 4 args'
+    sys.exit(1)
+  pp.pprint(client.searchExperimentsByCreationTime(args[0],args[1],eval(args[2]),eval(args[3]),))
+
+elif cmd == 'getAllExperimentsInProject':
+  if len(args) != 1:
+    print 'getAllExperimentsInProject requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllExperimentsInProject(args[0],))
+
+elif cmd == 'getAllUserExperiments':
+  if len(args) != 2:
+    print 'getAllUserExperiments requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.getAllUserExperiments(args[0],args[1],))
+
+elif cmd == 'createExperiment':
+  if len(args) != 2:
+    print 'createExperiment requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.createExperiment(args[0],eval(args[1]),))
+
+elif cmd == 'getExperiment':
+  if len(args) != 1:
+    print 'getExperiment requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getExperiment(args[0],))
+
+elif cmd == 'updateExperiment':
+  if len(args) != 2:
+    print 'updateExperiment requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateExperiment(args[0],eval(args[1]),))
+
+elif cmd == 'updateExperimentConfiguration':
+  if len(args) != 2:
+    print 'updateExperimentConfiguration requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateExperimentConfiguration(args[0],eval(args[1]),))
+
+elif cmd == 'updateResourceScheduleing':
+  if len(args) != 2:
+    print 'updateResourceScheduleing requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateResourceScheduleing(args[0],eval(args[1]),))
+
+elif cmd == 'validateExperiment':
+  if len(args) != 1:
+    print 'validateExperiment requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.validateExperiment(args[0],))
+
+elif cmd == 'launchExperiment':
+  if len(args) != 2:
+    print 'launchExperiment requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.launchExperiment(args[0],args[1],))
+
+elif cmd == 'getExperimentStatus':
+  if len(args) != 1:
+    print 'getExperimentStatus requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getExperimentStatus(args[0],))
+
+elif cmd == 'getExperimentOutputs':
+  if len(args) != 1:
+    print 'getExperimentOutputs requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getExperimentOutputs(args[0],))
+
+elif cmd == 'getIntermediateOutputs':
+  if len(args) != 1:
+    print 'getIntermediateOutputs requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getIntermediateOutputs(args[0],))
+
+elif cmd == 'getJobStatuses':
+  if len(args) != 1:
+    print 'getJobStatuses requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getJobStatuses(args[0],))
+
+elif cmd == 'getJobDetails':
+  if len(args) != 1:
+    print 'getJobDetails requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getJobDetails(args[0],))
+
+elif cmd == 'getDataTransferDetails':
+  if len(args) != 1:
+    print 'getDataTransferDetails requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getDataTransferDetails(args[0],))
+
+elif cmd == 'cloneExperiment':
+  if len(args) != 2:
+    print 'cloneExperiment requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.cloneExperiment(args[0],args[1],))
+
+elif cmd == 'terminateExperiment':
+  if len(args) != 1:
+    print 'terminateExperiment requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.terminateExperiment(args[0],))
+
+elif cmd == 'registerApplicationModule':
+  if len(args) != 2:
+    print 'registerApplicationModule requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.registerApplicationModule(args[0],eval(args[1]),))
+
+elif cmd == 'getApplicationModule':
+  if len(args) != 1:
+    print 'getApplicationModule requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getApplicationModule(args[0],))
+
+elif cmd == 'updateApplicationModule':
+  if len(args) != 2:
+    print 'updateApplicationModule requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateApplicationModule(args[0],eval(args[1]),))
+
+elif cmd == 'getAllAppModules':
+  if len(args) != 1:
+    print 'getAllAppModules requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllAppModules(args[0],))
+
+elif cmd == 'deleteApplicationModule':
+  if len(args) != 1:
+    print 'deleteApplicationModule requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteApplicationModule(args[0],))
+
+elif cmd == 'registerApplicationDeployment':
+  if len(args) != 2:
+    print 'registerApplicationDeployment requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.registerApplicationDeployment(args[0],eval(args[1]),))
+
+elif cmd == 'getApplicationDeployment':
+  if len(args) != 1:
+    print 'getApplicationDeployment requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getApplicationDeployment(args[0],))
+
+elif cmd == 'updateApplicationDeployment':
+  if len(args) != 2:
+    print 'updateApplicationDeployment requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateApplicationDeployment(args[0],eval(args[1]),))
+
+elif cmd == 'deleteApplicationDeployment':
+  if len(args) != 1:
+    print 'deleteApplicationDeployment requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteApplicationDeployment(args[0],))
+
+elif cmd == 'getAllApplicationDeployments':
+  if len(args) != 1:
+    print 'getAllApplicationDeployments requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllApplicationDeployments(args[0],))
+
+elif cmd == 'getAppModuleDeployedResources':
+  if len(args) != 1:
+    print 'getAppModuleDeployedResources requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAppModuleDeployedResources(args[0],))
+
+elif cmd == 'registerApplicationInterface':
+  if len(args) != 2:
+    print 'registerApplicationInterface requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.registerApplicationInterface(args[0],eval(args[1]),))
+
+elif cmd == 'getApplicationInterface':
+  if len(args) != 1:
+    print 'getApplicationInterface requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getApplicationInterface(args[0],))
+
+elif cmd == 'updateApplicationInterface':
+  if len(args) != 2:
+    print 'updateApplicationInterface requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateApplicationInterface(args[0],eval(args[1]),))
+
+elif cmd == 'deleteApplicationInterface':
+  if len(args) != 1:
+    print 'deleteApplicationInterface requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteApplicationInterface(args[0],))
+
+elif cmd == 'getAllApplicationInterfaceNames':
+  if len(args) != 1:
+    print 'getAllApplicationInterfaceNames requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllApplicationInterfaceNames(args[0],))
+
+elif cmd == 'getAllApplicationInterfaces':
+  if len(args) != 1:
+    print 'getAllApplicationInterfaces requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllApplicationInterfaces(args[0],))
+
+elif cmd == 'getApplicationInputs':
+  if len(args) != 1:
+    print 'getApplicationInputs requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getApplicationInputs(args[0],))
+
+elif cmd == 'getApplicationOutputs':
+  if len(args) != 1:
+    print 'getApplicationOutputs requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getApplicationOutputs(args[0],))
+
+elif cmd == 'getAvailableAppInterfaceComputeResources':
+  if len(args) != 1:
+    print 'getAvailableAppInterfaceComputeResources requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAvailableAppInterfaceComputeResources(args[0],))
+
+elif cmd == 'registerComputeResource':
+  if len(args) != 1:
+    print 'registerComputeResource requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.registerComputeResource(eval(args[0]),))
+
+elif cmd == 'getComputeResource':
+  if len(args) != 1:
+    print 'getComputeResource requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getComputeResource(args[0],))
+
+elif cmd == 'getAllComputeResourceNames':
+  if len(args) != 0:
+    print 'getAllComputeResourceNames requires 0 args'
+    sys.exit(1)
+  pp.pprint(client.getAllComputeResourceNames())
+
+elif cmd == 'updateComputeResource':
+  if len(args) != 2:
+    print 'updateComputeResource requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateComputeResource(args[0],eval(args[1]),))
+
+elif cmd == 'deleteComputeResource':
+  if len(args) != 1:
+    print 'deleteComputeResource requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteComputeResource(args[0],))
+
+elif cmd == 'addLocalSubmissionDetails':
+  if len(args) != 3:
+    print 'addLocalSubmissionDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addLocalSubmissionDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'updateLocalSubmissionDetails':
+  if len(args) != 2:
+    print 'updateLocalSubmissionDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateLocalSubmissionDetails(args[0],eval(args[1]),))
+
+elif cmd == 'getLocalJobSubmission':
+  if len(args) != 1:
+    print 'getLocalJobSubmission requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getLocalJobSubmission(args[0],))
+
+elif cmd == 'addSSHJobSubmissionDetails':
+  if len(args) != 3:
+    print 'addSSHJobSubmissionDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addSSHJobSubmissionDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'getSSHJobSubmission':
+  if len(args) != 1:
+    print 'getSSHJobSubmission requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getSSHJobSubmission(args[0],))
+
+elif cmd == 'addUNICOREJobSubmissionDetails':
+  if len(args) != 3:
+    print 'addUNICOREJobSubmissionDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addUNICOREJobSubmissionDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'getUnicoreJobSubmission':
+  if len(args) != 1:
+    print 'getUnicoreJobSubmission requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getUnicoreJobSubmission(args[0],))
+
+elif cmd == 'addCloudJobSubmissionDetails':
+  if len(args) != 3:
+    print 'addCloudJobSubmissionDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addCloudJobSubmissionDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'getCloudJobSubmission':
+  if len(args) != 1:
+    print 'getCloudJobSubmission requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getCloudJobSubmission(args[0],))
+
+elif cmd == 'updateSSHJobSubmissionDetails':
+  if len(args) != 2:
+    print 'updateSSHJobSubmissionDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateSSHJobSubmissionDetails(args[0],eval(args[1]),))
+
+elif cmd == 'updateCloudJobSubmissionDetails':
+  if len(args) != 2:
+    print 'updateCloudJobSubmissionDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateCloudJobSubmissionDetails(args[0],eval(args[1]),))
+
+elif cmd == 'updateUnicoreJobSubmissionDetails':
+  if len(args) != 2:
+    print 'updateUnicoreJobSubmissionDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateUnicoreJobSubmissionDetails(args[0],eval(args[1]),))
+
+elif cmd == 'addLocalDataMovementDetails':
+  if len(args) != 3:
+    print 'addLocalDataMovementDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addLocalDataMovementDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'updateLocalDataMovementDetails':
+  if len(args) != 2:
+    print 'updateLocalDataMovementDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateLocalDataMovementDetails(args[0],eval(args[1]),))
+
+elif cmd == 'getLocalDataMovement':
+  if len(args) != 1:
+    print 'getLocalDataMovement requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getLocalDataMovement(args[0],))
+
+elif cmd == 'addSCPDataMovementDetails':
+  if len(args) != 3:
+    print 'addSCPDataMovementDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addSCPDataMovementDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'updateSCPDataMovementDetails':
+  if len(args) != 2:
+    print 'updateSCPDataMovementDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateSCPDataMovementDetails(args[0],eval(args[1]),))
+
+elif cmd == 'getSCPDataMovement':
+  if len(args) != 1:
+    print 'getSCPDataMovement requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getSCPDataMovement(args[0],))
+
+elif cmd == 'addUnicoreDataMovementDetails':
+  if len(args) != 3:
+    print 'addUnicoreDataMovementDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addUnicoreDataMovementDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'updateUnicoreDataMovementDetails':
+  if len(args) != 2:
+    print 'updateUnicoreDataMovementDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateUnicoreDataMovementDetails(args[0],eval(args[1]),))
+
+elif cmd == 'getUnicoreDataMovement':
+  if len(args) != 1:
+    print 'getUnicoreDataMovement requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getUnicoreDataMovement(args[0],))
+
+elif cmd == 'addGridFTPDataMovementDetails':
+  if len(args) != 3:
+    print 'addGridFTPDataMovementDetails requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addGridFTPDataMovementDetails(args[0],eval(args[1]),eval(args[2]),))
+
+elif cmd == 'updateGridFTPDataMovementDetails':
+  if len(args) != 2:
+    print 'updateGridFTPDataMovementDetails requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateGridFTPDataMovementDetails(args[0],eval(args[1]),))
+
+elif cmd == 'getGridFTPDataMovement':
+  if len(args) != 1:
+    print 'getGridFTPDataMovement requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getGridFTPDataMovement(args[0],))
+
+elif cmd == 'changeJobSubmissionPriority':
+  if len(args) != 2:
+    print 'changeJobSubmissionPriority requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.changeJobSubmissionPriority(args[0],eval(args[1]),))
+
+elif cmd == 'changeDataMovementPriority':
+  if len(args) != 2:
+    print 'changeDataMovementPriority requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.changeDataMovementPriority(args[0],eval(args[1]),))
+
+elif cmd == 'changeJobSubmissionPriorities':
+  if len(args) != 1:
+    print 'changeJobSubmissionPriorities requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.changeJobSubmissionPriorities(eval(args[0]),))
+
+elif cmd == 'changeDataMovementPriorities':
+  if len(args) != 1:
+    print 'changeDataMovementPriorities requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.changeDataMovementPriorities(eval(args[0]),))
+
+elif cmd == 'deleteJobSubmissionInterface':
+  if len(args) != 2:
+    print 'deleteJobSubmissionInterface requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.deleteJobSubmissionInterface(args[0],args[1],))
+
+elif cmd == 'deleteDataMovementInterface':
+  if len(args) != 2:
+    print 'deleteDataMovementInterface requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.deleteDataMovementInterface(args[0],args[1],))
+
+elif cmd == 'registerResourceJobManager':
+  if len(args) != 1:
+    print 'registerResourceJobManager requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.registerResourceJobManager(eval(args[0]),))
+
+elif cmd == 'updateResourceJobManager':
+  if len(args) != 2:
+    print 'updateResourceJobManager requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateResourceJobManager(args[0],eval(args[1]),))
+
+elif cmd == 'getResourceJobManager':
+  if len(args) != 1:
+    print 'getResourceJobManager requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getResourceJobManager(args[0],))
+
+elif cmd == 'deleteResourceJobManager':
+  if len(args) != 1:
+    print 'deleteResourceJobManager requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteResourceJobManager(args[0],))
+
+elif cmd == 'deleteBatchQueue':
+  if len(args) != 2:
+    print 'deleteBatchQueue requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.deleteBatchQueue(args[0],args[1],))
+
+elif cmd == 'registerGatewayResourceProfile':
+  if len(args) != 1:
+    print 'registerGatewayResourceProfile requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.registerGatewayResourceProfile(eval(args[0]),))
+
+elif cmd == 'getGatewayResourceProfile':
+  if len(args) != 1:
+    print 'getGatewayResourceProfile requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getGatewayResourceProfile(args[0],))
+
+elif cmd == 'updateGatewayResourceProfile':
+  if len(args) != 2:
+    print 'updateGatewayResourceProfile requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateGatewayResourceProfile(args[0],eval(args[1]),))
+
+elif cmd == 'deleteGatewayResourceProfile':
+  if len(args) != 1:
+    print 'deleteGatewayResourceProfile requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteGatewayResourceProfile(args[0],))
+
+elif cmd == 'addGatewayComputeResourcePreference':
+  if len(args) != 3:
+    print 'addGatewayComputeResourcePreference requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.addGatewayComputeResourcePreference(args[0],args[1],eval(args[2]),))
+
+elif cmd == 'getGatewayComputeResourcePreference':
+  if len(args) != 2:
+    print 'getGatewayComputeResourcePreference requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.getGatewayComputeResourcePreference(args[0],args[1],))
+
+elif cmd == 'getAllGatewayComputeResourcePreferences':
+  if len(args) != 1:
+    print 'getAllGatewayComputeResourcePreferences requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllGatewayComputeResourcePreferences(args[0],))
+
+elif cmd == 'getAllGatewayComputeResources':
+  if len(args) != 0:
+    print 'getAllGatewayComputeResources requires 0 args'
+    sys.exit(1)
+  pp.pprint(client.getAllGatewayComputeResources())
+
+elif cmd == 'updateGatewayComputeResourcePreference':
+  if len(args) != 3:
+    print 'updateGatewayComputeResourcePreference requires 3 args'
+    sys.exit(1)
+  pp.pprint(client.updateGatewayComputeResourcePreference(args[0],args[1],eval(args[2]),))
+
+elif cmd == 'deleteGatewayComputeResourcePreference':
+  if len(args) != 2:
+    print 'deleteGatewayComputeResourcePreference requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.deleteGatewayComputeResourcePreference(args[0],args[1],))
+
+elif cmd == 'getAllWorkflows':
+  if len(args) != 1:
+    print 'getAllWorkflows requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getAllWorkflows(args[0],))
+
+elif cmd == 'getWorkflow':
+  if len(args) != 1:
+    print 'getWorkflow requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getWorkflow(args[0],))
+
+elif cmd == 'deleteWorkflow':
+  if len(args) != 1:
+    print 'deleteWorkflow requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.deleteWorkflow(args[0],))
+
+elif cmd == 'registerWorkflow':
+  if len(args) != 2:
+    print 'registerWorkflow requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.registerWorkflow(args[0],eval(args[1]),))
+
+elif cmd == 'updateWorkflow':
+  if len(args) != 2:
+    print 'updateWorkflow requires 2 args'
+    sys.exit(1)
+  pp.pprint(client.updateWorkflow(args[0],eval(args[1]),))
+
+elif cmd == 'getWorkflowTemplateId':
+  if len(args) != 1:
+    print 'getWorkflowTemplateId requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.getWorkflowTemplateId(args[0],))
+
+elif cmd == 'isWorkflowExistWithName':
+  if len(args) != 1:
+    print 'isWorkflowExistWithName requires 1 args'
+    sys.exit(1)
+  pp.pprint(client.isWorkflowExistWithName(args[0],))
+
+else:
+  print 'Unrecognized method %s' % cmd
+  sys.exit(1)
+
+transport.close()
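
The connection boilerplate in the generated CLI above can also be reused directly from application code. The following is a minimal sketch only, not part of the commit: the lib path, host, and port are placeholder assumptions, and it exercises just the parameterless getAPIVersion() call listed in the usage text.

    #!/usr/bin/env python
    # Sketch: calling the Airavata API through the generated Python stubs.
    # Assumptions: LIB_DIR points at the SDK's lib directory on disk and an
    # Airavata API server is reachable at HOST:PORT (placeholders below).
    import sys

    LIB_DIR = '/path/to/airavata-python-sdk/src/main/resources/lib'  # assumption
    sys.path.append(LIB_DIR)

    from thrift.transport import TSocket
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol
    from apache.airavata.api import Airavata

    HOST = 'localhost'   # assumption: adjust to your deployment
    PORT = 8930          # assumption: adjust to your deployment

    transport = TTransport.TBufferedTransport(TSocket.TSocket(HOST, PORT))
    protocol = TBinaryProtocol.TBinaryProtocol(transport)
    client = Airavata.Client(protocol)

    transport.open()
    try:
        print client.getAPIVersion()  # Python 2 print, matching the generated code
    finally:
        transport.close()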