Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2020/11/16 15:11:44 UTC

[GitHub] [airflow] orzZXY commented on a change in pull request #11055: Fix fail to convert Pendulum to MySQL datetime

orzZXY commented on a change in pull request #11055:
URL: https://github.com/apache/airflow/pull/11055#discussion_r524339856



##########
File path: airflow/models/dagrun.py
##########
@@ -128,7 +128,8 @@ def refresh_from_db(self, session: Session = None):
         """
         DR = DagRun
 
-        exec_date = func.cast(self.execution_date, DateTime)
+        from airflow.utils.timezone import make_naive
+        exec_date = func.cast(make_naive(self.execution_date), DateTime)

Review comment:
      Yes, you're right. I thought the lines you posted should have caught the issue, but they didn't. Someone more familiar with this code will need to fix it.
   
  Sorry, the file upload isn't working, so here is the trace:
   # airflow backfill streaming -s 2020-09-08T00:00:00 -e 2020-09-08T01:00:00
   [2020-11-16 14:59:13,121] {__init__.py:51} INFO - Using executor LocalExecutor
   [2020-11-16 14:59:13,122] {dagbag.py:396} INFO - Filling up the DagBag from /home/ec2-user/airflow/dags
   Traceback (most recent call last):
     File "/usr/local/lib/python3.7/site-packages/mysql/connector/conversion.py", line 179, in to_mysql
       return getattr(self, "_{0}_to_mysql".format(type_name))(value)
   AttributeError: 'MySQLConverter' object has no attribute '_pendulum_to_mysql'
   
   During handling of the above exception, another exception occurred:
   
   Traceback (most recent call last):
     File "/usr/local/lib/python3.7/site-packages/mysql/connector/cursor.py", line 395, in _process_params_dict
       conv = to_mysql(conv)
     File "/usr/local/lib/python3.7/site-packages/mysql/connector/conversion.py", line 182, in to_mysql
       "MySQL type".format(type_name))
   TypeError: Python 'pendulum' cannot be converted to a MySQL type
   
   During handling of the above exception, another exception occurred:
   
   Traceback (most recent call last):
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/base.py", line 1284, in _execute_context
       cursor, statement, parameters, context
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/default.py", line 590, in do_execute
       cursor.execute(statement, parameters)
     File "/usr/local/lib/python3.7/site-packages/mysql/connector/cursor.py", line 537, in execute
       stmt, self._process_params_dict(params))
     File "/usr/local/lib/python3.7/site-packages/mysql/connector/cursor.py", line 404, in _process_params_dict
       "Failed processing pyformat-parameters; %s" % err)
   mysql.connector.errors.ProgrammingError: Failed processing pyformat-parameters; Python 'pendulum' cannot be converted to a MySQL type
   
   The above exception was the direct cause of the following exception:
   
   Traceback (most recent call last):
     File "/usr/local/bin/airflow", line 37, in <module>
       args.func(args)
     File "/usr/local/lib/python3.7/site-packages/airflow/utils/cli.py", line 75, in wrapper
       return f(*args, **kwargs)
     File "/usr/local/lib/python3.7/site-packages/airflow/bin/cli.py", line 221, in backfill
       run_backwards=args.run_backwards
     File "/usr/local/lib/python3.7/site-packages/airflow/models/dag.py", line 1415, in run
       job.run()
     File "/usr/local/lib/python3.7/site-packages/airflow/jobs/base_job.py", line 221, in run
       self._execute()
     File "/usr/local/lib/python3.7/site-packages/airflow/utils/db.py", line 74, in wrapper
       return func(*args, **kwargs)
     File "/usr/local/lib/python3.7/site-packages/airflow/jobs/backfill_job.py", line 788, in _execute
       session=session)
     File "/usr/local/lib/python3.7/site-packages/airflow/utils/db.py", line 70, in wrapper
       return func(*args, **kwargs)
     File "/usr/local/lib/python3.7/site-packages/airflow/jobs/backfill_job.py", line 704, in _execute_for_run_dates
       dag_run = self._get_dag_run(next_run_date, session=session)
     File "/usr/local/lib/python3.7/site-packages/airflow/utils/db.py", line 70, in wrapper
       return func(*args, **kwargs)
     File "/usr/local/lib/python3.7/site-packages/airflow/jobs/backfill_job.py", line 313, in _get_dag_run
       conf=self.conf,
     File "/usr/local/lib/python3.7/site-packages/airflow/utils/db.py", line 70, in wrapper
       return func(*args, **kwargs)
     File "/usr/local/lib/python3.7/site-packages/airflow/models/dag.py", line 1471, in create_dagrun
       run.refresh_from_db()
     File "/usr/local/lib/python3.7/site-packages/airflow/utils/db.py", line 74, in wrapper
       return func(*args, **kwargs)
     File "/usr/local/lib/python3.7/site-packages/airflow/models/dagrun.py", line 109, in refresh_from_db
       DR.run_id == self.run_id
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/orm/query.py", line 3436, in one
       ret = self.one_or_none()
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/orm/query.py", line 3405, in one_or_none
       ret = list(self)
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/orm/query.py", line 3481, in __iter__
       return self._execute_and_instances(context)
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/orm/query.py", line 3506, in _execute_and_instances
       result = conn.execute(querycontext.statement, self._params)
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/base.py", line 1020, in execute
       return meth(self, multiparams, params)
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/sql/elements.py", line 298, in _execute_on_connection
       return connection._execute_clauseelement(self, multiparams, params)
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/base.py", line 1139, in _execute_clauseelement
       distilled_params,
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/base.py", line 1324, in _execute_context
       e, statement, parameters, cursor, context
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/base.py", line 1518, in _handle_dbapi_exception
       sqlalchemy_exception, with_traceback=exc_info[2], from_=e
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/util/compat.py", line 178, in raise_
       raise exception
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/base.py", line 1284, in _execute_context
       cursor, statement, parameters, context
     File "/usr/local/lib64/python3.7/site-packages/sqlalchemy/engine/default.py", line 590, in do_execute
       cursor.execute(statement, parameters)
     File "/usr/local/lib/python3.7/site-packages/mysql/connector/cursor.py", line 537, in execute
       stmt, self._process_params_dict(params))
     File "/usr/local/lib/python3.7/site-packages/mysql/connector/cursor.py", line 404, in _process_params_dict
       "Failed processing pyformat-parameters; %s" % err)
   sqlalchemy.exc.ProgrammingError: (mysql.connector.errors.ProgrammingError) Failed processing pyformat-parameters; Python 'pendulum' cannot be converted to a MySQL type
   [SQL: SELECT dag_run.state AS dag_run_state, dag_run.id AS dag_run_id, dag_run.dag_id AS dag_run_dag_id, dag_run.execution_date AS dag_run_execution_date, dag_run.start_date AS dag_run_start_date, dag_run.end_date AS dag_run_end_date, dag_run.run_id AS dag_run_run_id, dag_run.external_trigger AS dag_run_external_trigger, dag_run.conf AS dag_run_conf 
   FROM dag_run 
   WHERE dag_run.dag_id = %(dag_id_1)s AND CAST(dag_run.execution_date AS DATETIME) = CAST(%(param_1)s AS DATETIME) AND dag_run.run_id = %(run_id_1)s]
   [parameters: {'dag_id_1': 'streaming', 'param_1': <Pendulum [2020-09-08T00:10:00+00:00]>, 'run_id_1': 'backfill_2020-09-08T00:10:00+00:00'}]
   (Background on this error at: http://sqlalche.me/e/f405)
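  
  For anyone else hitting this, here is a minimal sketch (not part of the PR) of what the driver is doing and why binding a naive datetime works. It assumes pendulum 1.x (the Pendulum class named in the trace) and mysql-connector-python; the make_naive call in the patch effectively produces the kind of value shown at the end:
  
    import datetime
    
    import pendulum
    from mysql.connector.conversion import MySQLConverter
    
    converter = MySQLConverter()
    
    # The bind value for param_1 above: a timezone-aware Pendulum instance.
    aware = pendulum.parse("2020-09-08T00:10:00+00:00")
    
    try:
        # to_mysql() dispatches on the class name and looks for a
        # _pendulum_to_mysql method, which does not exist, hence the
        # TypeError seen in the trace above.
        converter.to_mysql(aware)
    except TypeError as err:
        print(err)
    
    # Roughly what make_naive() hands back when the default timezone is UTC:
    # a plain, naive datetime.datetime, which the converter serializes via
    # its stock _datetime_to_mysql path.
    naive = datetime.datetime(
        aware.year, aware.month, aware.day,
        aware.hour, aware.minute, aware.second, aware.microsecond,
    )
    print(converter.to_mysql(naive))
  
  Since make_naive(self.execution_date) runs in Python while the query is being built, SQLAlchemy binds a plain naive datetime and the driver never has to convert a Pendulum object at all.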




----------------------------------------------------------------
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

For queries about this service, please contact Infrastructure at:
users@infra.apache.org