Posted to commits@usergrid.apache.org by mr...@apache.org on 2016/08/01 16:54:12 UTC

[37/50] [abbrv] usergrid git commit: additional cleanup

additional cleanup


Project: http://git-wip-us.apache.org/repos/asf/usergrid/repo
Commit: http://git-wip-us.apache.org/repos/asf/usergrid/commit/3bc3d78b
Tree: http://git-wip-us.apache.org/repos/asf/usergrid/tree/3bc3d78b
Diff: http://git-wip-us.apache.org/repos/asf/usergrid/diff/3bc3d78b

Branch: refs/heads/master
Commit: 3bc3d78b5fb407c914b2e7040a686d0d2cbc20b1
Parents: 0952603
Author: Jeff West <jw...@apigee.com>
Authored: Tue Jul 26 16:58:51 2016 -0700
Committer: Jeff West <jw...@apigee.com>
Committed: Tue Jul 26 16:58:51 2016 -0700

----------------------------------------------------------------------
 .../es_tools/cluster_shard_allocation.py        |  15 +-
 .../es_tools/command_sender.py                  |  13 +-
 .../es_tools/index_deleter.py                   |  20 +-
 .../es_tools/index_iterator_size_checker.py     | 285 -------------------
 .../es_tools/index_replica_setter.py            |   1 +
 .../es_tools/index_shard_allocator.py           |  22 +-
 .../es_tools/mapping_retriever.py               |   2 +-
 .../es_tools/monitor_tasks.py                   |   1 +
 .../general/duplicate_name_checker.py           |   1 +
 .../usergrid_tools/general/user_creator.py      |  68 -----
 .../iterators/usergrid_iterator.py              |   2 +-
 .../migration/usergrid_data_exporter.py         |  25 +-
 .../migration/usergrid_data_migrator.py         |   7 +-
 13 files changed, 51 insertions(+), 411 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/cluster_shard_allocation.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/cluster_shard_allocation.py b/utils/usergrid-util-python/es_tools/cluster_shard_allocation.py
index 2e0fcbd..2b72640 100644
--- a/utils/usergrid-util-python/es_tools/cluster_shard_allocation.py
+++ b/utils/usergrid-util-python/es_tools/cluster_shard_allocation.py
@@ -24,26 +24,30 @@ import requests
 __author__ = 'Jeff West @ ApigeeCorporation'
 
 # The purpose of this script is to set certain nodes in an ElasticSearch cluster to be excluded from index allocation,
-# generally for the purpose of shutting down or restarting the node
+# generally for the purpose of decommissioning or troubleshooting a node.
 
+# Optionally shut down each node once all of its replicas have been moved off.
 SHUTDOWN_NODES = True
 
-nodes = [
+# These are the nodes that will have shard allocation disabled.  Replicas will then be gradually moved off
+# these nodes; the time required depends on index size, network speed, CPU, and overall cluster load.
+
+exclude_nodes = [
     'elasticsearch206west',
     'elasticsearch207west',
 ]
 
 base_url = 'http://localhost:9200'
 
-exclude_nodes = nodes
-
 nodes_string = ",".join(exclude_nodes)
 
 print 'Excluding: ' + nodes_string
+
 url_template = '%s/_cluster/settings' % base_url
 
 status_code = 503
 
+# When the cluster is under load, the settings update may return a 5xx response; retry until it succeeds.
 while status_code >= 500:
     r = requests.put(
         '%s/_cluster/settings' % base_url,
@@ -62,7 +66,6 @@ ready = False
 nodes_shut_down = []
 
 while not ready:
-
     ready = True
     nodes_left = 0
     bytes_left = 0
@@ -105,4 +108,4 @@ while not ready:
         print 'NOT READY! Waiting for %s nodes and %s GB' % (nodes_left, bytes_left / 1024.0 / 1000000)
         time.sleep(10)
 
-# print 'READY TO MOVE!'
+print 'READY TO MOVE!'
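
A minimal sketch of the exclusion request itself, which is not shown in this hunk. It assumes the script uses
the standard exclude-by-name setting (cluster.routing.allocation.exclude._name) via a transient cluster
settings update; switch to persistent settings if the exclusion should survive a cluster restart.

import json
import requests

base_url = 'http://localhost:9200'
exclude_nodes = ['elasticsearch206west', 'elasticsearch207west']

# Assumption: exclude-by-name is the mechanism used; the exact payload is not part of this hunk.
payload = {
    'transient': {
        'cluster.routing.allocation.exclude._name': ','.join(exclude_nodes)
    }
}

r = requests.put('%s/_cluster/settings' % base_url, data=json.dumps(payload))
print(r.status_code)
print(r.text)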

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/command_sender.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/command_sender.py b/utils/usergrid-util-python/es_tools/command_sender.py
index 8208e78..c2a5797 100644
--- a/utils/usergrid-util-python/es_tools/command_sender.py
+++ b/utils/usergrid-util-python/es_tools/command_sender.py
@@ -25,20 +25,11 @@ __author__ = 'Jeff West @ ApigeeCorporation'
 
 # Simple utility to send commands, so that you don't have to recall the proper format
 
-#
-# url = 'http://localhost:9200/_cat/shards'
-#
-# r = requests.get(url)
-#
-# response = r.text
-#
-# print response
-
 data = {
     "commands": [
         {
             "move": {
-                "index": "usergrid__a34ad389-b626-11e4-848f-06b49118d7d0__application_target_final",
+                "index": "usergrid__APPID__application_target_final",
                 "shard": 14,
                 "from_node": "elasticsearch018",
                 "to_node": "elasticsearch021"
@@ -46,7 +37,7 @@ data = {
         },
         {
             "move": {
-                "index": "usergrid__a34ad389-b626-11e4-848f-06b49118d7d0__application_target_final",
+                "index": "usergrid__APPID__application_target_final",
                 "shard": 12,
                 "from_node": "elasticsearch018",
                 "to_node": "elasticsearch009"

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/index_deleter.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/index_deleter.py b/utils/usergrid-util-python/es_tools/index_deleter.py
index 9b60006..ac21a6f 100644
--- a/utils/usergrid-util-python/es_tools/index_deleter.py
+++ b/utils/usergrid-util-python/es_tools/index_deleter.py
@@ -16,6 +16,7 @@
 #    * specific language governing permissions and limitations
 # * under the License.
 # */
+from multiprocessing import Pool
 
 import requests
 import logging
@@ -58,6 +59,14 @@ counter = 0
 process = False
 delete_counter = 0
 
+indexes_to_delete = []
+
+
+def delete_index(index_name):
+    url_template = '%s/%s' % (url_base, index_name)
+    print 'DELETING Index [%s] at URL %s' % (index_name, url_template)
+    response = requests.delete(url_template)
+
 for index in indices:
     process = False
     counter += 1
@@ -78,9 +87,12 @@ for index in indices:
                 process = False
 
     if process:
-        delete_counter += 1
+        indexes_to_delete.append(index)
+
+print 'Found [%s] indexes to delete: %s' % (len(indexes_to_delete), indexes_to_delete)
 
-        url_template = '%s/%s' % (url_base, index)
-        print 'DELETING Index [%s] %s at URL %s' % (delete_counter, index, url_template)
+if len(indexes_to_delete) > 0:
+    pool = Pool(4)
+    pool.map(delete_index, indexes_to_delete)
 
-        response = requests.delete('%s/%s' % (url_base, index))
+print 'Done!'
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/index_iterator_size_checker.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/index_iterator_size_checker.py b/utils/usergrid-util-python/es_tools/index_iterator_size_checker.py
deleted file mode 100644
index 03924b2..0000000
--- a/utils/usergrid-util-python/es_tools/index_iterator_size_checker.py
+++ /dev/null
@@ -1,285 +0,0 @@
-# */
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *   http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing,
-# * software distributed under the License is distributed on an
-# * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# * KIND, either express or implied.  See the License for the
-#    * specific language governing permissions and limitations
-# * under the License.
-# */
-
-import json
-import re
-import traceback
-from multiprocessing.pool import Pool
-import requests
-
-index_url_template = 'http://localhost:9200/{index_name}/_search?size={size}&from={from_var}&q=-edgeName:zzzcollzzz|logs'
-
-index_names = [
-    'es-index-name'
-]
-
-baas_url = 'http://localhost:8080/org/{app_id}/{collection}/{entity_id}'
-
-field_part_map = {
-    'mockdata': 'mockData'
-}
-
-
-def update_entity_field(entity, field_name, field_value):
-    entity_copy = entity.copy()
-
-    worked = True
-    is_array = False
-    array_length = 0
-
-    try:
-        parts = field_name.split('.')
-
-        if parts[len(parts) - 1] != 'size':
-            print parts
-            exit()
-
-        change_me = entity_copy
-
-        for i, field_part in enumerate(parts):
-            field_part = field_part_map.get(field_part, field_part)
-
-            if field_part == 'size':
-                break
-
-            if isinstance(change_me, dict):
-                if field_part not in change_me:
-                    worked = False
-                    # print 'ERROR!  field [%s] not in entity: %s' % (field_part, json.dumps(change_me))
-                    break
-
-                change_me = change_me[field_part]
-
-            elif isinstance(change_me, list):
-                array_length = len(change_me)
-
-                if i == len(parts) - 2 and len(parts) > i + 1 and parts[i + 1] == 'size':
-
-                    for j in xrange(0, len(change_me)):
-                        print 'arrau!'
-                        change_me[j] = update_entity_field(change_me[j], '.'.join(parts[i:]), field_value)
-                        # element['size'] = field_value
-
-                elif len(change_me) == 1:
-                    print 'single array'
-                    change_me = change_me[0][field_part]
-                else:
-                    print 'WTF!'
-        try:
-            change_me['size'] = field_value
-        except:
-            if array_length != 1:
-                print traceback.format_exc()
-                print 'damn'
-
-    except:
-        print '---Error updating field [%s] in document: %s' % (field_name, json.dumps(entity))
-        print traceback.format_exc()
-
-    if array_length > 1:
-        print '++++++++ARRAY!!!!! %s' % array_length
-
-    return entity_copy
-
-
-def update_entity_fields(entity, fields):
-    entity_copy = entity.copy()
-
-    for field in fields:
-        field_name = field.get('name')
-
-        if 'string' in field:
-            field_value = field.get('string')
-
-        elif 'long' in field:
-            field_value = field.get('long')
-
-        else:
-            print 'Unexpected field type! %s' % json.dumps(field)
-            return entity_copy
-
-        entity_copy = update_entity_field(entity_copy, field_name, field_value)
-
-    return entity_copy
-
-
-my = {
-    'foo': {
-        'bar': {
-            'color': 'red'
-        }
-    }
-}
-
-fields = [
-    {
-        'name': 'foo.size',
-        'string': '2'
-    },
-    {
-        'name': 'foo.bar.size',
-        'long': 2
-    }
-]
-
-
-def work(item):
-    try:
-        url = 'http://localhost:8080/org/{app_id}/{collection}/{entity_id}'.format(
-            app_id=item[0],
-            collection=item[1],
-            entity_id=item[2]
-        )
-        print url
-        r_get = requests.get(url)
-
-        if r_get.status_code != 200:
-            print 'ERROR GETTING ENTITY AT URL: %s' % url
-            return
-
-        response_json = r_get.json()
-
-        entities = response_json.get('entities')
-
-        if len(entities) <= 0:
-            print 'TOO MANY ENTITIES AT URL: %s' % url
-            return
-
-        entity = entities[0]
-
-        new_entity = update_entity_fields(entity, item[3])
-
-        with open('/Users/ApigeeCorporation/tmp/hack/%s.json' % item[2], 'w') as f:
-            json.dump(entity, f, indent=2)
-
-        with open('/Users/ApigeeCorporation/tmp/hack/%s_new.json' % item[2], 'w') as f:
-            json.dump(new_entity, f, indent=2)
-
-            r_put = requests.put(url, data=json.dumps(new_entity))
-
-            if r_put.status_code == 200:
-                print 'PUT [%s]: %s' % (r_put.status_code, url)
-                pass
-            elif r_put.status_code:
-                print 'PUT [%s]: %s | %s' % (r_put.status_code, url, r_put.text)
-
-    except:
-        print traceback.format_exc()
-
-
-POOL_SIZE = 4
-
-counter = 0
-size = POOL_SIZE * 10
-size = 1000
-
-total_docs = 167501577
-start_from = 0
-from_var = 0
-page = 0
-
-work_items = []
-
-pool = Pool(POOL_SIZE)
-
-keep_going = True
-
-while keep_going:
-    work_items = []
-
-    if from_var > total_docs:
-        keep_going = False
-        break
-
-    from_var = start_from + (page * size)
-    page += 1
-
-    for index_name in index_names:
-
-        index_url = index_url_template.format(index_name=index_name, size=size, from_var=from_var)
-
-        print 'Getting URL: ' + index_url
-
-        r = requests.get(index_url)
-
-        if r.status_code != 200:
-            print r.text
-            exit()
-
-        response = r.json()
-
-        hits = response.get('hits', {}).get('hits')
-
-        re_app_id = re.compile('appId\((.+),')
-        re_ent_id = re.compile('entityId\((.+),')
-        re_type = re.compile('entityId\(.+,(.+)\)')
-
-        print 'Index: %s | hits: %s' % (index_name, len(hits))
-
-        if len(hits) == 0:
-            keep_going = False
-            break
-
-        for hit_data in hits:
-            source = hit_data.get('_source')
-
-            application_id = source.get('applicationId')
-
-            app_id_find = re_app_id.findall(application_id)
-
-            if len(app_id_find) > 0:
-                app_id = app_id_find[0]
-
-                entity_id_tmp = source.get('entityId')
-
-                entity_id_find = re_ent_id.findall(entity_id_tmp)
-                entity_type_find = re_type.findall(entity_id_tmp)
-
-                if len(entity_id_find) > 0 and len(entity_type_find) > 0:
-                    entity_id = entity_id_find[0]
-                    collection = entity_type_find[0]
-                    fields_to_update = []
-
-                    for field in source.get('fields'):
-                        if field.get('name')[-5:] == '.size':
-                            fields_to_update.append(field)
-
-                            print json.dumps(source)
-
-                            work_items.append((app_id, collection, entity_id, fields_to_update))
-
-                    counter += 1
-
-    print 'Work Items: %s' % len(work_items)
-
-    try:
-        pool.map(work, work_items)
-
-
-    except:
-        print traceback.format_exc()
-
-        try:
-            pool.map(work, work_items)
-        except:
-            pass
-
-    print 'Work Done!'
-
-print 'done: %s' % counter

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/index_replica_setter.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/index_replica_setter.py b/utils/usergrid-util-python/es_tools/index_replica_setter.py
index 1214e48..383c195 100644
--- a/utils/usergrid-util-python/es_tools/index_replica_setter.py
+++ b/utils/usergrid-util-python/es_tools/index_replica_setter.py
@@ -103,6 +103,7 @@ def update_shards(index_name):
         current_replicas = int(index_settings.get('number_of_replicas'))
 
         if current_replicas == NUMBER_VALUE:
+            # no action required
             return
 
         success = False

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/index_shard_allocator.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/index_shard_allocator.py b/utils/usergrid-util-python/es_tools/index_shard_allocator.py
index d411744..d7f52f5 100644
--- a/utils/usergrid-util-python/es_tools/index_shard_allocator.py
+++ b/utils/usergrid-util-python/es_tools/index_shard_allocator.py
@@ -32,31 +32,13 @@ __author__ = 'Jeff West @ ApigeeCorporation'
 nodes_c32xl = [
     'elasticsearch000eu',
     'elasticsearch001eu',
-    'elasticsearch002eu',
-    'elasticsearch003eu',
-    'elasticsearch004eu',
-    'elasticsearch005eu',
-    'elasticsearch009eu',
-    'elasticsearch010eu',
-    'elasticsearch011eu',
-    'elasticsearch012eu',
-    'elasticsearch013eu',
-    'elasticsearch014eu',
+    'elasticsearch002eu'
 ]
 
 nodes_c34xl = [
     'elasticsearch015eu',
     'elasticsearch018eu',
-    'elasticsearch019eu',
-    'elasticsearch020eu',
-    'elasticsearch021eu',
-    'elasticsearch022eu',
-    'elasticsearch023eu',
-    'elasticsearch024eu',
-    'elasticsearch025eu',
-    'elasticsearch026eu',
-    'elasticsearch027eu',
-    'elasticsearch028eu'
+    'elasticsearch019eu'
 ]
 
 nodes = nodes_c34xl

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/mapping_retriever.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/mapping_retriever.py b/utils/usergrid-util-python/es_tools/mapping_retriever.py
index 29fbe11..d3a55f0 100644
--- a/utils/usergrid-util-python/es_tools/mapping_retriever.py
+++ b/utils/usergrid-util-python/es_tools/mapping_retriever.py
@@ -49,7 +49,7 @@ for type_name, mapping_detail in mappings.iteritems():
 
     print 'Processing %s' % type_name
 
-    filename = '/Users/ApigeeCorporation/tmp/%s_%s_source_mapping.json' % (
+    filename = '/tmp/%s_%s_source_mapping.json' % (
         SOURCE_INDEX, type_name)
 
     print filename

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/es_tools/monitor_tasks.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/es_tools/monitor_tasks.py b/utils/usergrid-util-python/es_tools/monitor_tasks.py
index b444322..8db30a0 100644
--- a/utils/usergrid-util-python/es_tools/monitor_tasks.py
+++ b/utils/usergrid-util-python/es_tools/monitor_tasks.py
@@ -25,6 +25,7 @@ __author__ = 'Jeff West @ ApigeeCorporation'
 
 # Utility for monitoring pending tasks in ElasticSearch
 
+
 def total_milliseconds(td):
     return (td.microseconds + td.seconds * 1000000) / 1000
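
The comment above describes the purpose only; the polling loop itself is not shown in this hunk. A minimal
sketch, assuming the standard /_cluster/pending_tasks endpoint is what gets monitored:

import time
import requests

base_url = 'http://localhost:9200'

# Assumption: pending tasks are read from /_cluster/pending_tasks; stop with Ctrl-C.
while True:
    tasks = requests.get('%s/_cluster/pending_tasks' % base_url).json().get('tasks', [])
    print('%s pending tasks' % len(tasks))
    time.sleep(5)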
 

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/usergrid_tools/general/duplicate_name_checker.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/usergrid_tools/general/duplicate_name_checker.py b/utils/usergrid-util-python/usergrid_tools/general/duplicate_name_checker.py
index a40e097..6b23403 100644
--- a/utils/usergrid-util-python/usergrid_tools/general/duplicate_name_checker.py
+++ b/utils/usergrid-util-python/usergrid_tools/general/duplicate_name_checker.py
@@ -19,6 +19,7 @@
 
 from usergrid import UsergridQueryIterator
 
+
 ### This iterates a collection using GRAPH and checks whether there is more than one entity with the same name
 
 url = 'https://host/org/app/collection?access_token=foo&limit=1000'
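
A minimal sketch of the duplicate check itself, which is outside this hunk. It assumes UsergridQueryIterator
yields entity dictionaries with a 'name' field when iterated:

from collections import Counter

from usergrid import UsergridQueryIterator

url = 'https://host/org/app/collection?access_token=foo&limit=1000'

# Assumption: each iterated entity is a dict carrying its 'name'.
name_counts = Counter(entity.get('name') for entity in UsergridQueryIterator(url))

for name, count in name_counts.items():
    if count > 1:
        print('%s appears %s times' % (name, count))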

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/usergrid_tools/general/user_creator.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/usergrid_tools/general/user_creator.py b/utils/usergrid-util-python/usergrid_tools/general/user_creator.py
deleted file mode 100644
index ace64ee..0000000
--- a/utils/usergrid-util-python/usergrid_tools/general/user_creator.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# */
-# * Licensed to the Apache Software Foundation (ASF) under one
-# * or more contributor license agreements.  See the NOTICE file
-# * distributed with this work for additional information
-# * regarding copyright ownership.  The ASF licenses this file
-# * to you under the Apache License, Version 2.0 (the
-# * "License"); you may not use this file except in compliance
-# * with the License.  You may obtain a copy of the License at
-# *
-# *   http://www.apache.org/licenses/LICENSE-2.0
-# *
-# * Unless required by applicable law or agreed to in writing,
-# * software distributed under the License is distributed on an
-# * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# * KIND, either express or implied.  See the License for the
-#    * specific language governing permissions and limitations
-# * under the License.
-# */
-
-import json
-import requests
-
-### This will create an array of org-level management users
-
-users = [
-    'me@example.com'
-]
-
-for user in users:
-
-    post_body = {
-        "username": user,
-        "name": user,
-        "email": user,
-        "password": "test12345"
-    }
-
-    print json.dumps(post_body)
-
-    r = requests.post('http://localhost:8080/management/organizations/asdf/users',
-                      headers={
-                          'Authorization': 'Bearer SADFSDF',
-                          'Content-Type': 'application/json'
-                      },
-                      data=json.dumps(post_body))
-
-    print r.status_code
-
-    print '%s: created (POST) [%s]: %s' % (user, r.status_code, r.text)
-
-    #
-    # r = requests.put('http://localhost:8080/management/users/%s' % user,
-    #                  headers={
-    #                      'Authorization': 'Bearer YWMtFlVrhK8nEeW-AhmxdmpAVAAAAVIYTHxTNSUxpQyUWZQ2LsZxcXSdNtO_lWo',
-    #                      'Content-Type': 'application/json'
-    #                  },
-    #                  data=json.dumps('{"confirmed": true}'))
-    #
-    # print '%s: confirmed: %s' % (user, r.status_code)
-    #
-    # r = requests.put('http://localhost:8080/management/users/%s' % user,
-    #                  headers={
-    #                      'Authorization': 'Bearer YWMtFlVrhK8nEeW-AhmxdmpAVAAAAVIYTHxTNSUxpQyUWZQ2LsZxcXSdNtO_lWo',
-    #                      'Content-Type': 'application/json'
-    #                  },
-    #                  data=json.dumps('{"activated": true}'))
-    #
-    # print '%s: activated: %s' % (user, r.status_code)

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/usergrid_tools/iterators/usergrid_iterator.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/usergrid_tools/iterators/usergrid_iterator.py b/utils/usergrid-util-python/usergrid_tools/iterators/usergrid_iterator.py
index 04328ab..632aa68 100644
--- a/utils/usergrid-util-python/usergrid_tools/iterators/usergrid_iterator.py
+++ b/utils/usergrid-util-python/usergrid_tools/iterators/usergrid_iterator.py
@@ -246,7 +246,7 @@ def create_new(org_name, app_name, collection_name, entity_data, source_client,
             e = c.entity_from_data(entity_data)
             e.put()
 
-        except UsergridError, err:
+        except UsergridError as err:
             logger.error(err)
             raise err
 

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_exporter.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_exporter.py b/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_exporter.py
index e374fbc..edfa1c6 100644
--- a/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_exporter.py
+++ b/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_exporter.py
@@ -17,6 +17,7 @@
 # * under the License.
 # */
 
+from __future__ import print_function
 import os
 import uuid
 from Queue import Empty
@@ -186,7 +187,7 @@ class StatusListener(Process):
                 org_results['apps'][app]['collections'].update(status_map)
 
                 try:
-                    for app, app_data in org_results['apps'].iteritems():
+                    for app, app_data in org_results['apps'].items():
                         app_data['summary'] = {
                             'max_created': -1,
                             'max_modified': -1,
@@ -197,7 +198,7 @@ class StatusListener(Process):
                         }
 
                         if 'collections' in app_data:
-                            for collection, collection_data in app_data['collections'].iteritems():
+                            for collection, collection_data in app_data['collections'].items():
 
                                 app_data['summary']['count'] += collection_data['count']
                                 app_data['summary']['bytes'] += collection_data['bytes']
@@ -236,13 +237,13 @@ class StatusListener(Process):
 
                         status_logger.warn('UPDATED status of org processed: %s' % json.dumps(org_results))
 
-                except KeyboardInterrupt, e:
+                except KeyboardInterrupt as e:
                     raise e
 
                 except:
-                    print traceback.format_exc()
+                    print(traceback.format_exc())
 
-            except KeyboardInterrupt, e:
+            except KeyboardInterrupt as e:
                 status_logger.warn('FINAL status of org processed: %s' % json.dumps(org_results))
                 raise e
 
@@ -260,7 +261,7 @@ class StatusListener(Process):
                     keep_going = False
 
             except:
-                print traceback.format_exc()
+                print(traceback.format_exc())
 
         logger.warn('FINAL status of org processed: %s' % json.dumps(org_results))
 
@@ -305,7 +306,7 @@ class EntityExportWorker(Process):
 
                     collection_worker_logger.info('Done! Finished app/collection: %s / %s' % (app, collection_name))
 
-                except KeyboardInterrupt, e:
+                except KeyboardInterrupt as e:
                     raise e
 
                 except Empty:
@@ -316,9 +317,9 @@ class EntityExportWorker(Process):
                     if empty_count >= 2:
                         keep_going = False
 
-                except Exception, e:
+                except Exception as e:
                     logger.exception('Error in CollectionWorker processing collection [%s]' % collection_name)
-                    print traceback.format_exc()
+                    print(traceback.format_exc())
 
         finally:
             if entity_file is not None:
@@ -790,7 +791,7 @@ def main():
                                                                  limit=config.get('limit'),
                                                                  **config.get('source_endpoint'))
 
-        print 'Retrieving apps from [%s]' % source_org_mgmt_url
+        print('Retrieving apps from [%s]' % source_org_mgmt_url)
         logger.info('Retrieving apps from [%s]' % source_org_mgmt_url)
 
         try:
@@ -807,9 +808,9 @@ def main():
 
             org_apps = r.json().get('data')
 
-        except Exception, e:
+        except Exception as e:
             logger.exception('ERROR Retrieving apps from [%s]' % source_org_mgmt_url)
-            print traceback.format_exc()
+            print(traceback.format_exc())
             logger.critical('Unable to retrieve apps from [%s] and will exit' % source_org_mgmt_url)
             exit()
 

http://git-wip-us.apache.org/repos/asf/usergrid/blob/3bc3d78b/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_migrator.py
----------------------------------------------------------------------
diff --git a/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_migrator.py b/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_migrator.py
index c99aa12..0ed0539 100644
--- a/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_migrator.py
+++ b/utils/usergrid-util-python/usergrid_tools/migration/usergrid_data_migrator.py
@@ -17,6 +17,7 @@
 # * under the License.
 # */
 
+from __future__ import print_function
 import os
 import uuid
 from Queue import Empty
@@ -2022,7 +2025,7 @@ def filter_apps_and_collections(org_apps):
                 logger.info('App=[%s] filtered Collections=[%s]' % (app, collections))
 
     except:
-        print traceback.format_exc()
+        print(traceback.format_exc())
 
     return app_collecitons
 
@@ -2113,7 +2116,7 @@ def main():
                                                                  limit=config.get('limit'),
                                                                  **config.get('source_endpoint'))
 
-        print 'Retrieving apps from [%s]' % source_org_mgmt_url
+        print('Retrieving apps from [%s]' % source_org_mgmt_url)
         logger.info('Retrieving apps from [%s]' % source_org_mgmt_url)
 
         try:
@@ -2132,7 +2135,7 @@ def main():
 
         except Exception:
             logger.exception('ERROR Retrieving apps from [%s]' % source_org_mgmt_url)
-            print traceback.format_exc()
+            print(traceback.format_exc())
             logger.critical('Unable to retrieve apps from [%s] and will exit' % source_org_mgmt_url)
             exit()