Posted to commits@airflow.apache.org by tu...@apache.org on 2020/12/04 09:42:43 UTC

[airflow] branch master updated: Improve error handling in cli and introduce consistency (#12764)

This is an automated email from the ASF dual-hosted git repository.

turbaszek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/master by this push:
     new 1bd98cd  Improve error handling in cli and introduce consistency (#12764)
1bd98cd is described below

commit 1bd98cd54ca46af86465d049e7a1295951413661
Author: Tomek Urbaszek <tu...@gmail.com>
AuthorDate: Fri Dec 4 09:41:41 2020 +0000

    Improve error handling in cli and introduce consistency (#12764)
    
    This PR is a follow-up to #12375 and #12704. It improves the handling
    of some errors in CLI commands so that users are not shown excessive
    tracebacks, and it uses SystemExit consistently.
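    
    The pattern applied throughout the diff below replaces a print to
    stderr followed by sys.exit(1) with a single raise SystemExit(message).
    A minimal sketch of the two styles follows; the function names and the
    session_ok parameter are illustrative only and are not part of the
    Airflow codebase, while the message string is taken from the worker
    change in this commit.
    
        import sys
    
        # Before: print the error to stderr, then exit with status code 1.
        def worker_old(session_ok: bool) -> None:
            if not session_ok:
                print("Worker exiting, database connection precheck failed.", file=sys.stderr)
                sys.exit(1)
    
        # After: raise SystemExit with the message directly. The interpreter
        # prints the message to stderr and exits with status 1, so the
        # behaviour is equivalent, but the call site is a single statement
        # and tests can assert on str(exception) via assertRaises(SystemExit).
        def worker_new(session_ok: bool) -> None:
            if not session_ok:
                raise SystemExit("Worker exiting, database connection precheck failed.")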
---
 airflow/cli/commands/celery_command.py        |  5 ++-
 airflow/cli/commands/config_command.py        |  7 ++---
 airflow/cli/commands/connection_command.py    | 44 +++++++++++----------------
 airflow/cli/commands/dag_command.py           |  6 ++--
 airflow/cli/commands/db_command.py            |  2 +-
 airflow/cli/commands/kubernetes_command.py    |  2 +-
 airflow/cli/commands/pool_command.py          | 20 ++++++------
 airflow/cli/commands/role_command.py          |  2 ++
 airflow/cli/commands/user_command.py          | 36 +++++++++-------------
 airflow/cli/commands/variable_command.py      | 11 ++++---
 tests/cli/commands/test_celery_command.py     |  2 +-
 tests/cli/commands/test_config_command.py     | 12 +++-----
 tests/cli/commands/test_connection_command.py | 14 ++++-----
 13 files changed, 71 insertions(+), 92 deletions(-)

diff --git a/airflow/cli/commands/celery_command.py b/airflow/cli/commands/celery_command.py
index 8276895..60b0f88 100644
--- a/airflow/cli/commands/celery_command.py
+++ b/airflow/cli/commands/celery_command.py
@@ -16,7 +16,7 @@
 # specific language governing permissions and limitations
 # under the License.
 """Celery command"""
-import sys
+
 from multiprocessing import Process
 from typing import Optional
 
@@ -95,8 +95,7 @@ def _serve_logs(skip_serve_logs: bool = False) -> Optional[Process]:
 def worker(args):
     """Starts Airflow Celery worker"""
     if not settings.validate_session():
-        print("Worker exiting... database connection precheck failed! ")
-        sys.exit(1)
+        raise SystemExit("Worker exiting, database connection precheck failed.")
 
     autoscale = args.autoscale
     skip_serve_logs = args.skip_serve_logs
diff --git a/airflow/cli/commands/config_command.py b/airflow/cli/commands/config_command.py
index 2eedfa5..1c2674f 100644
--- a/airflow/cli/commands/config_command.py
+++ b/airflow/cli/commands/config_command.py
@@ -16,7 +16,6 @@
 # under the License.
 """Config sub-commands"""
 import io
-import sys
 
 import pygments
 from pygments.lexers.configs import IniLexer
@@ -39,12 +38,10 @@ def show_config(args):
 def get_value(args):
     """Get one value from configuration"""
     if not conf.has_section(args.section):
-        print(f'The section [{args.section}] is not found in config.', file=sys.stderr)
-        sys.exit(1)
+        raise SystemExit(f'The section [{args.section}] is not found in config.')
 
     if not conf.has_option(args.section, args.option):
-        print(f'The option [{args.section}/{args.option}] is not found in config.', file=sys.stderr)
-        sys.exit(1)
+        raise SystemExit(f'The option [{args.section}/{args.option}] is not found in config.')
 
     value = conf.get(args.section, args.option)
     print(value)
diff --git a/airflow/cli/commands/connection_command.py b/airflow/cli/commands/connection_command.py
index dd96f9f..5426194 100644
--- a/airflow/cli/commands/connection_command.py
+++ b/airflow/cli/commands/connection_command.py
@@ -112,17 +112,13 @@ def _format_connections(conns: List[Connection], fmt: str) -> str:
 
 
 def _is_stdout(fileio: io.TextIOWrapper) -> bool:
-    if fileio.name == '<stdout>':
-        return True
-    return False
+    return fileio.name == '<stdout>'
 
 
 def _valid_uri(uri: str) -> bool:
     """Check if a URI is valid, by checking if both scheme and netloc are available"""
     uri_parts = urlparse(uri)
-    if uri_parts.scheme == '' or uri_parts.netloc == '':
-        return False
-    return True
+    return uri_parts.scheme != '' and uri_parts.netloc != ''
 
 
 def connections_export(args):
@@ -140,11 +136,10 @@ def connections_export(args):
             _, filetype = os.path.splitext(args.file.name)
             filetype = filetype.lower()
             if filetype not in allowed_formats:
-                msg = (
-                    f"Unsupported file format. "
-                    f"The file must have the extension {', '.join(allowed_formats)}"
+                raise SystemExit(
+                    f"Unsupported file format. The file must have "
+                    f"the extension {', '.join(allowed_formats)}."
                 )
-                raise SystemExit(msg)
 
         connections = session.query(Connection).order_by(Connection.conn_id).all()
         msg = _format_connections(connections, filetype)
@@ -153,7 +148,7 @@ def connections_export(args):
         if _is_stdout(args.file):
             print("Connections successfully exported.", file=sys.stderr)
         else:
-            print(f"Connections successfully exported to {args.file.name}")
+            print(f"Connections successfully exported to {args.file.name}.")
 
 
 alternative_conn_specs = ['conn_type', 'conn_host', 'conn_login', 'conn_password', 'conn_schema', 'conn_port']
@@ -167,20 +162,19 @@ def connections_add(args):
     invalid_args = []
     if args.conn_uri:
         if not _valid_uri(args.conn_uri):
-            msg = f'The URI provided to --conn-uri is invalid: {args.conn_uri}'
-            raise SystemExit(msg)
+            raise SystemExit(f'The URI provided to --conn-uri is invalid: {args.conn_uri}')
         for arg in alternative_conn_specs:
             if getattr(args, arg) is not None:
                 invalid_args.append(arg)
     elif not args.conn_type:
         missing_args.append('conn-uri or conn-type')
     if missing_args:
-        msg = f'The following args are required to add a connection: {missing_args!r}'
-        raise SystemExit(msg)
+        raise SystemExit(f'The following args are required to add a connection: {missing_args!r}')
     if invalid_args:
-        msg = 'The following args are not compatible with the add flag and --conn-uri flag: {invalid!r}'
-        msg = msg.format(invalid=invalid_args)
-        raise SystemExit(msg)
+        raise SystemExit(
+            f'The following args are not compatible with the '
+            f'add flag and --conn-uri flag: {invalid_args!r}'
+        )
 
     if args.conn_uri:
         new_conn = Connection(conn_id=args.conn_id, description=args.conn_description, uri=args.conn_uri)
@@ -201,7 +195,7 @@ def connections_add(args):
     with create_session() as session:
         if not session.query(Connection).filter(Connection.conn_id == new_conn.conn_id).first():
             session.add(new_conn)
-            msg = '\n\tSuccessfully added `conn_id`={conn_id} : {uri}\n'
+            msg = 'Successfully added `conn_id`={conn_id} : {uri}'
             msg = msg.format(
                 conn_id=new_conn.conn_id,
                 uri=args.conn_uri
@@ -223,7 +217,7 @@ def connections_add(args):
             )
             print(msg)
         else:
-            msg = f'\n\tA connection with `conn_id`={new_conn.conn_id} already exists\n'
+            msg = f'A connection with `conn_id`={new_conn.conn_id} already exists.'
             raise SystemExit(msg)
 
 
@@ -234,13 +228,9 @@ def connections_delete(args):
         try:
             to_delete = session.query(Connection).filter(Connection.conn_id == args.conn_id).one()
         except exc.NoResultFound:
-            msg = f'\n\tDid not find a connection with `conn_id`={args.conn_id}\n'
-            raise SystemExit(msg)
+            raise SystemExit(f'Did not find a connection with `conn_id`={args.conn_id}')
         except exc.MultipleResultsFound:
-            msg = f'\n\tFound more than one connection with `conn_id`={args.conn_id}\n'
-            raise SystemExit(msg)
+            raise SystemExit(f'Found more than one connection with `conn_id`={args.conn_id}')
         else:
-            deleted_conn_id = to_delete.conn_id
             session.delete(to_delete)
-            msg = f'\n\tSuccessfully deleted `conn_id`={deleted_conn_id}\n'
-            print(msg)
+            print(f"Successfully deleted connection with `conn_id`={to_delete.conn_id}")
diff --git a/airflow/cli/commands/dag_command.py b/airflow/cli/commands/dag_command.py
index f7b57bc..779e73f 100644
--- a/airflow/cli/commands/dag_command.py
+++ b/airflow/cli/commands/dag_command.py
@@ -177,12 +177,10 @@ def dag_show(args):
     imgcat = args.imgcat
 
     if filename and imgcat:
-        print(
+        raise SystemExit(
             "Option --save and --imgcat are mutually exclusive. "
             "Please remove one option to execute the command.",
-            file=sys.stderr,
         )
-        sys.exit(1)
     elif filename:
         _save_dot_to_file(dot, filename)
     elif imgcat:
@@ -197,7 +195,7 @@ def _display_dot_via_imgcat(dot: Dot):
         proc = subprocess.Popen("imgcat", stdout=subprocess.PIPE, stdin=subprocess.PIPE)
     except OSError as e:
         if e.errno == errno.ENOENT:
-            raise AirflowException(
+            raise SystemExit(
                 "Failed to execute. Make sure the imgcat executables are on your systems \'PATH\'"
             )
         else:
diff --git a/airflow/cli/commands/db_command.py b/airflow/cli/commands/db_command.py
index 00cbe7c..89078c3 100644
--- a/airflow/cli/commands/db_command.py
+++ b/airflow/cli/commands/db_command.py
@@ -29,7 +29,7 @@ def initdb(args):
     """Initializes the metadata database"""
     print("DB: " + repr(settings.engine.url))
     db.initdb()
-    print("Done.")
+    print("Initialization done")
 
 
 def resetdb(args):
diff --git a/airflow/cli/commands/kubernetes_command.py b/airflow/cli/commands/kubernetes_command.py
index c92d117..399bd4b 100644
--- a/airflow/cli/commands/kubernetes_command.py
+++ b/airflow/cli/commands/kubernetes_command.py
@@ -112,7 +112,7 @@ def cleanup_pods(args):
                 try:
                     _delete_pod(pod.metadata.name, namespace)
                 except ApiException as e:
-                    print(f"can't remove POD: {e}", file=sys.stderr)
+                    print(f"Can't remove POD: {e}", file=sys.stderr)
                 continue
             print(f'No action taken on pod {pod_name}')
         continue_token = pod_list.metadata._continue  # pylint: disable=protected-access
diff --git a/airflow/cli/commands/pool_command.py b/airflow/cli/commands/pool_command.py
index 20b7959..d61b116 100644
--- a/airflow/cli/commands/pool_command.py
+++ b/airflow/cli/commands/pool_command.py
@@ -18,7 +18,6 @@
 """Pools sub-commands"""
 import json
 import os
-import sys
 from json import JSONDecodeError
 
 from airflow.api.client import get_current_api_client
@@ -52,8 +51,11 @@ def pool_list(args):
 def pool_get(args):
     """Displays pool info by a given name"""
     api_client = get_current_api_client()
-    pools = [api_client.get_pool(name=args.pool)]
-    _show_pools(pools=pools, output=args.output)
+    try:
+        pools = [api_client.get_pool(name=args.pool)]
+        _show_pools(pools=pools, output=args.output)
+    except PoolNotFound:
+        raise SystemExit(f"Pool {args.pool} does not exist")
 
 
 @cli_utils.action_logging
@@ -74,7 +76,7 @@ def pool_delete(args):
         api_client.delete_pool(name=args.pool)
         print("Pool deleted")
     except PoolNotFound:
-        sys.exit(f"Pool {args.pool} does not exist")
+        raise SystemExit(f"Pool {args.pool} does not exist")
 
 
 @cli_utils.action_logging
@@ -82,10 +84,11 @@ def pool_delete(args):
 def pool_import(args):
     """Imports pools from the file"""
     if not os.path.exists(args.file):
-        sys.exit("Missing pools file.")
-    _, failed = pool_import_helper(args.file)
+        raise SystemExit("Missing pools file.")
+    pools, failed = pool_import_helper(args.file)
     if len(failed) > 0:
-        sys.exit(f"Failed to update pool(s): {', '.join(failed)}")
+        raise SystemExit(f"Failed to update pool(s): {', '.join(failed)}")
+    print(f"Uploaded {len(pools)} pool(s)")
 
 
 def pool_export(args):
@@ -103,7 +106,7 @@ def pool_import_helper(filepath):
     try:  # pylint: disable=too-many-nested-blocks
         pools_json = json.loads(data)
     except JSONDecodeError as e:
-        sys.exit("Invalid json file: " + str(e))
+        raise SystemExit("Invalid json file: " + str(e))
     pools = []
     failed = []
     for k, v in pools_json.items():
@@ -111,7 +114,6 @@ def pool_import_helper(filepath):
             pools.append(api_client.create_pool(name=k, slots=v["slots"], description=v["description"]))
         else:
             failed.append(k)
-    print(f"{len(pools)} of {len(pools_json)} pool(s) successfully updated.")
     return pools, failed
 
 
diff --git a/airflow/cli/commands/role_command.py b/airflow/cli/commands/role_command.py
index a760610..f9fb701 100644
--- a/airflow/cli/commands/role_command.py
+++ b/airflow/cli/commands/role_command.py
@@ -35,8 +35,10 @@ def roles_list(args):
 
 
 @cli_utils.action_logging
+@suppress_logs_and_warning()
 def roles_create(args):
     """Creates new empty role in DB"""
     appbuilder = cached_app().appbuilder  # pylint: disable=no-member
     for role_name in args.role:
         appbuilder.sm.add_role(role_name)
+    print(f"Added {len(args.role)} role(s)")
diff --git a/airflow/cli/commands/user_command.py b/airflow/cli/commands/user_command.py
index a5f8ec4..3fd80bd 100644
--- a/airflow/cli/commands/user_command.py
+++ b/airflow/cli/commands/user_command.py
@@ -22,7 +22,6 @@ import os
 import random
 import re
 import string
-import sys
 
 from airflow.cli.simple_table import AirflowConsole
 from airflow.utils import cli as cli_utils
@@ -59,16 +58,16 @@ def users_create(args):
         password = getpass.getpass('Password:')
         password_confirmation = getpass.getpass('Repeat for confirmation:')
         if password != password_confirmation:
-            raise SystemExit('Passwords did not match!')
+            raise SystemExit('Passwords did not match')
 
     if appbuilder.sm.find_user(args.username):
         print(f'{args.username} already exist in the db')
         return
     user = appbuilder.sm.add_user(args.username, args.firstname, args.lastname, args.email, role, password)
     if user:
-        print(f'{args.role} user {args.username} created.')
+        print(f'{args.role} user {args.username} created')
     else:
-        raise SystemExit('Failed to create user.')
+        raise SystemExit('Failed to create user')
 
 
 @cli_utils.action_logging
@@ -79,12 +78,12 @@ def users_delete(args):
     try:
         user = next(u for u in appbuilder.sm.get_all_users() if u.username == args.username)
     except StopIteration:
-        raise SystemExit(f'{args.username} is not a valid user.')
+        raise SystemExit(f'{args.username} is not a valid user')
 
     if appbuilder.sm.del_register_user(user):
-        print(f'User {args.username} deleted.')
+        print(f'User {args.username} deleted')
     else:
-        raise SystemExit('Failed to delete user.')
+        raise SystemExit('Failed to delete user')
 
 
 @cli_utils.action_logging
@@ -110,16 +109,16 @@ def users_manage_role(args, remove=False):
         if role in user.roles:
             user.roles = [r for r in user.roles if r != role]
             appbuilder.sm.update_user(user)
-            print(f'User "{user}" removed from role "{args.role}".')
+            print(f'User "{user}" removed from role "{args.role}"')
         else:
-            raise SystemExit(f'User "{user}" is not a member of role "{args.role}".')
+            raise SystemExit(f'User "{user}" is not a member of role "{args.role}"')
     else:
         if role in user.roles:
-            raise SystemExit(f'User "{user}" is already a member of role "{args.role}".')
+            raise SystemExit(f'User "{user}" is already a member of role "{args.role}"')
         else:
             user.roles.append(role)
             appbuilder.sm.update_user(user)
-            print(f'User "{user}" added to role "{args.role}".')
+            print(f'User "{user}" added to role "{args.role}"')
 
 
 def users_export(args):
@@ -153,16 +152,14 @@ def users_import(args):
     """Imports users from the json file"""
     json_file = getattr(args, 'import')
     if not os.path.exists(json_file):
-        print("File '{}' does not exist")
-        sys.exit(1)
+        raise SystemExit(f"File '{json_file}' does not exist")
 
     users_list = None  # pylint: disable=redefined-outer-name
     try:
         with open(json_file) as file:
             users_list = json.loads(file.read())
     except ValueError as e:
-        print(f"File '{json_file}' is not valid JSON. Error: {e}")
-        sys.exit(1)
+        raise SystemExit(f"File '{json_file}' is not valid JSON. Error: {e}")
 
     users_created, users_updated = _import_users(users_list)
     if users_created:
@@ -183,16 +180,14 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             role = appbuilder.sm.find_role(rolename)
             if not role:
                 valid_roles = appbuilder.sm.get_all_roles()
-                print(f"Error: '{rolename}' is not a valid role. Valid roles are: {valid_roles}")
-                sys.exit(1)
+                raise SystemExit(f"Error: '{rolename}' is not a valid role. Valid roles are: {valid_roles}")
             else:
                 roles.append(role)
 
         required_fields = ['username', 'firstname', 'lastname', 'email', 'roles']
         for field in required_fields:
             if not user.get(field):
-                print(f"Error: '{field}' is a required field, but was not specified")
-                sys.exit(1)
+                raise SystemExit(f"Error: '{field}' is a required field, but was not specified")
 
         existing_user = appbuilder.sm.find_user(email=user['email'])
         if existing_user:
@@ -202,12 +197,11 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             existing_user.last_name = user['lastname']
 
             if existing_user.username != user['username']:
-                print(
+                raise SystemExit(
                     "Error: Changing the username is not allowed - "
                     "please delete and recreate the user with "
                     "email '{}'".format(user['email'])
                 )
-                sys.exit(1)
 
             appbuilder.sm.update_user(existing_user)
             users_updated.append(user['email'])
diff --git a/airflow/cli/commands/variable_command.py b/airflow/cli/commands/variable_command.py
index b3f34ed..55cf94b 100644
--- a/airflow/cli/commands/variable_command.py
+++ b/airflow/cli/commands/variable_command.py
@@ -18,7 +18,6 @@
 """Variable subcommands"""
 import json
 import os
-import sys
 from json import JSONDecodeError
 
 from airflow.cli.simple_table import AirflowConsole
@@ -36,6 +35,7 @@ def variables_list(args):
     AirflowConsole().print_as(data=variables, output=args.output, mapper=lambda x: {"key": x.key})
 
 
+@suppress_logs_and_warning()
 def variables_get(args):
     """Displays variable by a given name"""
     try:
@@ -46,20 +46,21 @@ def variables_get(args):
             var = Variable.get(args.key, deserialize_json=args.json, default_var=args.default)
             print(var)
     except (ValueError, KeyError) as e:
-        print(str(e), file=sys.stderr)
-        sys.exit(1)
+        raise SystemExit(str(e).strip("'\""))
 
 
 @cli_utils.action_logging
 def variables_set(args):
     """Creates new variable with a given name and value"""
     Variable.set(args.key, args.value, serialize_json=args.json)
+    print(f"Variable {args.key} created")
 
 
 @cli_utils.action_logging
 def variables_delete(args):
     """Deletes variable by a given name"""
     Variable.delete(args.key)
+    print(f"Variable {args.key} deleted")
 
 
 @cli_utils.action_logging
@@ -95,7 +96,7 @@ def _import_helper(filepath):
                 fail_count += 1
             else:
                 suc_count += 1
-        print("{} of {} variables successfully updated.".format(suc_count, len(var_json)))
+        print(f"{suc_count} of {len(var_json)} variables successfully updated.")
         if fail_count:
             print(f"{fail_count} variable(s) failed to be updated.")
 
@@ -116,4 +117,4 @@ def _variable_export_helper(filepath):
 
     with open(filepath, 'w') as varfile:
         varfile.write(json.dumps(var_dict, sort_keys=True, indent=4))
-    print("{} variables successfully exported to {}".format(len(var_dict), filepath))
+    print(f"{len(var_dict)} variables successfully exported to {filepath}")
diff --git a/tests/cli/commands/test_celery_command.py b/tests/cli/commands/test_celery_command.py
index 01fdb3a..2a37e27 100644
--- a/tests/cli/commands/test_celery_command.py
+++ b/tests/cli/commands/test_celery_command.py
@@ -40,7 +40,7 @@ class TestWorkerPrecheck(unittest.TestCase):
         mock_validate_session.return_value = False
         with self.assertRaises(SystemExit) as cm:
             celery_command.worker(Namespace(queues=1, concurrency=1))
-        self.assertEqual(cm.exception.code, 1)
+        self.assertEqual(str(cm.exception), "Worker exiting, database connection precheck failed.")
 
     @conf_vars({('celery', 'worker_precheck'): 'False'})
     def test_worker_precheck_exception(self):
diff --git a/tests/cli/commands/test_config_command.py b/tests/cli/commands/test_config_command.py
index 5668654..532a95f 100644
--- a/tests/cli/commands/test_config_command.py
+++ b/tests/cli/commands/test_config_command.py
@@ -60,25 +60,21 @@ class TestCliConfigGetValue(unittest.TestCase):
         mock_conf.has_section.return_value = False
         mock_conf.has_option.return_value = True
 
-        with contextlib.redirect_stderr(io.StringIO()) as temp_stderr, self.assertRaises(SystemExit) as cm:
+        with self.assertRaises(SystemExit) as err:
             config_command.get_value(
                 self.parser.parse_args(['config', 'get-value', 'missing-section', 'dags_folder'])
             )
-        self.assertEqual(1, cm.exception.code)
-        self.assertEqual(
-            "The section [missing-section] is not found in config.", temp_stderr.getvalue().strip()
-        )
+        self.assertEqual("The section [missing-section] is not found in config.", str(err.exception))
 
     @mock.patch("airflow.cli.commands.config_command.conf")
     def test_should_raise_exception_when_option_is_missing(self, mock_conf):
         mock_conf.has_section.return_value = True
         mock_conf.has_option.return_value = False
 
-        with contextlib.redirect_stderr(io.StringIO()) as temp_stderr, self.assertRaises(SystemExit) as cm:
+        with self.assertRaises(SystemExit) as err:
             config_command.get_value(
                 self.parser.parse_args(['config', 'get-value', 'missing-section', 'dags_folder'])
             )
-        self.assertEqual(1, cm.exception.code)
         self.assertEqual(
-            "The option [missing-section/dags_folder] is not found in config.", temp_stderr.getvalue().strip()
+            "The option [missing-section/dags_folder] is not found in config.", str(err.exception)
         )
diff --git a/tests/cli/commands/test_connection_command.py b/tests/cli/commands/test_connection_command.py
index 700d819..1da1174 100644
--- a/tests/cli/commands/test_connection_command.py
+++ b/tests/cli/commands/test_connection_command.py
@@ -498,7 +498,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "--conn-uri=%s" % TEST_URL,
                     "--conn-description=new0 description",
                 ],
-                "\tSuccessfully added `conn_id`=new0 : postgresql://airflow:airflow@host:5432/airflow",
+                "Successfully added `conn_id`=new0 : postgresql://airflow:airflow@host:5432/airflow",
                 {
                     "conn_type": "postgres",
                     "description": "new0 description",
@@ -518,7 +518,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "--conn-uri=%s" % TEST_URL,
                     "--conn-description=new1 description",
                 ],
-                "\tSuccessfully added `conn_id`=new1 : postgresql://airflow:airflow@host:5432/airflow",
+                "Successfully added `conn_id`=new1 : postgresql://airflow:airflow@host:5432/airflow",
                 {
                     "conn_type": "postgres",
                     "description": "new1 description",
@@ -539,7 +539,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "--conn-extra",
                     "{'extra': 'yes'}",
                 ],
-                "\tSuccessfully added `conn_id`=new2 : postgresql://airflow:airflow@host:5432/airflow",
+                "Successfully added `conn_id`=new2 : postgresql://airflow:airflow@host:5432/airflow",
                 {
                     "conn_type": "postgres",
                     "description": None,
@@ -562,7 +562,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "--conn-description",
                     "new3 description",
                 ],
-                "\tSuccessfully added `conn_id`=new3 : postgresql://airflow:airflow@host:5432/airflow",
+                "Successfully added `conn_id`=new3 : postgresql://airflow:airflow@host:5432/airflow",
                 {
                     "conn_type": "postgres",
                     "description": "new3 description",
@@ -587,7 +587,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "--conn-schema=airflow",
                     "--conn-description=  new4 description  ",
                 ],
-                "\tSuccessfully added `conn_id`=new4 : hive_metastore://airflow:******@host:9083/airflow",
+                "Successfully added `conn_id`=new4 : hive_metastore://airflow:******@host:9083/airflow",
                 {
                     "conn_type": "hive_metastore",
                     "description": "  new4 description  ",
@@ -611,7 +611,7 @@ class TestCliAddConnections(unittest.TestCase):
                     "{'extra': 'yes'}",
                     "--conn-description=new5 description",
                 ],
-                "\tSuccessfully added `conn_id`=new5 : google_cloud_platform://:@:",
+                "Successfully added `conn_id`=new5 : google_cloud_platform://:@:",
                 {
                     "conn_type": "google_cloud_platform",
                     "description": "new5 description",
@@ -703,7 +703,7 @@ class TestCliDeleteConnections(unittest.TestCase):
             stdout = stdout.getvalue()
 
         # Check deletion stdout
-        self.assertIn("\tSuccessfully deleted `conn_id`=new1", stdout)
+        self.assertIn("Successfully deleted connection with `conn_id`=new1", stdout)
 
         # Check deletions
         result = session.query(Connection).filter(Connection.conn_id == "new1").first()