You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2019/11/21 16:51:15 UTC

[GitHub] [airflow] kaxil edited a comment on issue #6396: [AIRFLOW-5726] Delete table as file name in RedshiftToS3Transfer

kaxil edited a comment on issue #6396: [AIRFLOW-5726] Delete table as file name in RedshiftToS3Transfer
URL: https://github.com/apache/airflow/pull/6396#issuecomment-557171010
 
 
   @JavierLopezT Apply the following code and the test will pass; it passes on my local machine:
   
   
   ```diff
   diff --git a/tests/operators/test_redshift_to_s3_operator.py b/tests/operators/test_redshift_to_s3_operator.py
   index 5fd8d46e3..baa4aad32 100644
   --- a/tests/operators/test_redshift_to_s3_operator.py
   +++ b/tests/operators/test_redshift_to_s3_operator.py
   @@ -30,10 +30,13 @@ from airflow.utils.tests import assertEqualIgnoreMultipleSpaces
   
    class TestRedshiftToS3Transfer(unittest.TestCase):
   
   +    @parameterized.expand([
   +        [True, "key/table_"],
   +        [False, "key"],
   +    ])
        @mock.patch("boto3.session.Session")
        @mock.patch("airflow.hooks.postgres_hook.PostgresHook.run")
   -    @parameterized.expand([(True, ), (False, )])
   -    def test_execute(self, mock_run, mock_session, boolean_value):
    +    def test_execute(self, table_as_file_name, expected_s3_key, mock_run, mock_session):
            access_key = "aws_access_key_id"
            secret_key = "aws_secret_access_key"
            mock_session.return_value = Session(access_key, secret_key)
   @@ -42,7 +45,6 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
            s3_bucket = "bucket"
            s3_key = "key"
            unload_options = ['HEADER', ]
   -        table_as_file_name = boolean_value
   
            RedshiftToS3Transfer(
                schema=schema,
   @@ -62,14 +64,14 @@ class TestRedshiftToS3Transfer(unittest.TestCase):
            select_query = "SELECT * FROM {schema}.{table}".format(schema=schema, table=table)
            unload_query = """
                        UNLOAD ('{select_query}')
   -                    TO 's3://{s3_bucket}/{s3_key}/{table}_'
   +                    TO 's3://{s3_bucket}/{s3_key}'
                        with credentials
                        'aws_access_key_id={access_key};aws_secret_access_key={secret_key}'
                        {unload_options};
                        """.format(select_query=select_query,
   -                               table=table,
                                   s3_bucket=s3_bucket,
   -                               s3_key=s3_key,
   +                               s3_key=expected_s3_key,
                                   access_key=access_key,
                                   secret_key=secret_key,
                                   unload_options=unload_options)
   ```

----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.
 
For queries about this service, please contact Infrastructure at:
users@infra.apache.org


With regards,
Apache Git Services