Posted to commits@airflow.apache.org by "Konstantin Privezentsev (JIRA)" <ji...@apache.org> on 2018/01/11 09:16:00 UTC

[jira] [Created] (AIRFLOW-1987) Illegal query generation with MS SQL Server as backend

Konstantin Privezentsev created AIRFLOW-1987:
------------------------------------------------

             Summary: Illegal query generation with MS SQL Server as backend
                 Key: AIRFLOW-1987
                 URL: https://issues.apache.org/jira/browse/AIRFLOW-1987
             Project: Apache Airflow
          Issue Type: Bug
          Components: scheduler, webserver
    Affects Versions: 1.9.0
         Environment: Linux, MS Sql Server 2017
            Reporter: Konstantin Privezentsev
            Priority: Minor


The scheduler and the webserver fail on startup with an error like this:
{code:none}
 [2018-01-09 14:56:11,965] {{models.py:189}} INFO - Filling up the DagBag from /mnt/data01/
 [2018-01-09 14:56:11,971] {{jobs.py:1551}} INFO - Exited execute loop
 [2018-01-09 14:56:11,980] {{jobs.py:1565}} INFO - Terminating child PID: 53
 [2018-01-09 14:56:11,981] {{jobs.py:1569}} INFO - Waiting up to 5 seconds for processes to exit...
 [2018-01-09 14:56:11,981] {{jobs.py:384}} ERROR - Got an exception! Propagating...
 Traceback (most recent call last):
   File "/usr/local/lib/python2.7/dist-packages/airflow/jobs.py", line 376, in helper
     pickle_dags)
   File "/usr/local/lib/python2.7/dist-packages/airflow/utils/db.py", line 50, in wrapper
     result = func(*args, **kwargs)
   File "/usr/local/lib/python2.7/dist-packages/airflow/jobs.py", line 1752, in process_file
     dagbag = models.DagBag(file_path)
   File "/usr/local/lib/python2.7/dist-packages/airflow/models.py", line 202, in __init__
     self.collect_dags(dag_folder)
   File "/usr/local/lib/python2.7/dist-packages/airflow/models.py", line 409, in collect_dags
     self.process_file(dag_folder, only_if_updated=only_if_updated)
   File "/usr/local/lib/python2.7/dist-packages/airflow/models.py", line 285, in process_file
     m = imp.load_source(mod_name, filepath)
   File "/mnt/data01/airflow-dags/his/udns_aggr.py", line 57, in <module>
     image='his-sdc-dev-01.avp.ru/his/udns-aggregator:prod')
   File "/opt/airflow/plugins/his.py", line 99, in __init__
     super(SparkOperator, self).__init__(*args, **kwargs)
   File "/opt/airflow/plugins/his.py", line 66, in __init__
     super(DockerOperator, self).__init__(*args, **kwargs)
   File "/usr/local/lib/python2.7/dist-packages/airflow/utils/decorators.py", line 86, in wrapper
     result = func(*args, **kwargs)
   File "/usr/local/lib/python2.7/dist-packages/airflow/operators/docker_operator.py", line 121, 
     super(DockerOperator, self).__init__(*args, **kwargs)
   File "/usr/local/lib/python2.7/dist-packages/airflow/utils/decorators.py", line 86, in wrapper
     result = func(*args, **kwargs)
   File "/usr/local/lib/python2.7/dist-packages/airflow/models.py", line 2219, in __init__
     self.resources = Resources(**(resources or {}))
   File "/usr/local/lib/python2.7/dist-packages/airflow/utils/operator_resources.py", line 104, in 
     cpus = configuration.getint('operators', 'default_cpus')
   File "/usr/local/lib/python2.7/dist-packages/airflow/configuration.py", line 420, in getint
     return conf.getint(section, key)
   File "/usr/local/lib/python2.7/dist-packages/airflow/configuration.py", line 227, in getint
     return int(self.get(section, key))
   File "/usr/local/lib/python2.7/dist-packages/airflow/configuration.py", line 186, in get
     section = str(section).lower()
   File "/usr/local/lib/python2.7/dist-packages/future/types/newstr.py", line 102, in __new__
     return super(newstr, cls).__new__(cls, value)
   File "/usr/local/lib/python2.7/dist-packages/airflow/bin/cli.py", line 73, in sigint_handler
     sys.exit(0)
 SystemExit: 0
 Traceback (most recent call last):
   File "/usr/local/bin/airflow", line 27, in <module>
     args.func(args)
   File "/usr/local/lib/python2.7/dist-packages/airflow/bin/cli.py", line 826, in scheduler
     job.run()
   File "/usr/local/lib/python2.7/dist-packages/airflow/jobs.py", line 203, in run
     self._execute()
   File "/usr/local/lib/python2.7/dist-packages/airflow/jobs.py", line 1549, in _execute
     self._execute_helper(processor_manager)
   File "/usr/local/lib/python2.7/dist-packages/airflow/jobs.py", line 1664, in _execute_helper
     (State.SCHEDULED,))
   File "/usr/local/lib/python2.7/dist-packages/airflow/utils/db.py", line 50, in wrapper
     result = func(*args, **kwargs)
   File "/usr/local/lib/python2.7/dist-packages/airflow/jobs.py", line 1337, in 
     session=session)
   File "/usr/local/lib/python2.7/dist-packages/airflow/utils/db.py", line 50, in wrapper
     result = func(*args, **kwargs)
   File "/usr/local/lib/python2.7/dist-packages/airflow/jobs.py", line 1079, in 
     task_instances_to_examine = ti_query.all()
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 2726, in all
     return list(self)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 2878, in __iter__
     return self._execute_and_instances(context)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/orm/query.py", line 2901, in 
     result = conn.execute(querycontext.statement, self._params)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 948, in execute
     return meth(self, multiparams, params)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/sql/elements.py", line 269, in 
     return connection._execute_clauseelement(self, multiparams, params)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1060, in 
     compiled_sql, distilled_params
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1200, in 
     context)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1413, in 
     exc_info
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/util/compat.py", line 203, in 
     reraise(type(exception), exception, tb=exc_tb, cause=cause)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/base.py", line 1193, in 
     context)
   File "/usr/local/lib/python2.7/dist-packages/sqlalchemy/engine/default.py", line 507, in 
     cursor.execute(statement, parameters)
 sqlalchemy.exc.ProgrammingError: (pyodbc.ProgrammingError) ('42000', "[42000] [Microsoft][ODBC ...][SQL Server]An expression of non-boolean type specified in a context where a condition is expected, near ')'. (4145) (SQLExecDirectW)") [SQL: u'SELECT task_instance.try_number AS task_instance_try_number, task_instance.task_id AS task_instance_task_id, task_instance.dag_id AS task_instance_dag_id, task_instance.execution_date AS task_instance_execution_date, task_instance.start_date AS task_instance_start_date, task_instance.end_date AS task_instance_end_date, task_instance.duration AS task_instance_duration, task_instance.state AS task_instance_state, task_instance.max_tries AS task_instance_max_tries, task_instance.hostname AS task_instance_hostname, task_instance.unixname AS task_instance_unixname, task_instance.job_id AS task_instance_job_id, task_instance.pool AS task_instance_pool, task_instance.queue AS task_instance_queue, task_instance.priority_weight AS task_instance_priority_weight, task_instance.operator AS task_instance_operator, task_instance.queued_dttm AS task_instance_queued_dttm, task_instance.pid AS task_instance_pid \nFROM task_instance LEFT OUTER JOIN dag_run ON dag_run.dag_id = task_instance.dag_id AND dag_run.execution_date = task_instance.execution_date LEFT OUTER JOIN dag ON dag.dag_id = task_instance.dag_id \nWHERE task_instance.dag_id IN (?) AND (dag_run.run_id IS NULL OR dag_run.run_id NOT LIKE ?) AND (dag.dag_id IS NULL OR NOT dag.is_paused) AND task_instance.state IN (?)'] [parameters: ('udns_kudu_partition_manager', u'backfill_%', u'scheduled')] (Background on this error at: http://sqlalche.me/e/f405)
{code}
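
The offending fragment is the predicate (dag.dag_id IS NULL OR NOT dag.is_paused): on SQL Server, is_paused is a BIT column, and a bare NOT <column> is not a valid search condition, which is exactly what error 4145 means, so the column has to be compared explicitly, e.g. dag.is_paused = 0. Below is a minimal SQLAlchemy sketch of the kind of filter that produces such SQL and a backend-portable rewrite; the Dag model here is an illustrative stand-in, not Airflow's actual model or scheduler code.

{code:python}
from sqlalchemy import Boolean, Column, String, not_, or_
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

# Illustrative stand-in for the "dag" table; not Airflow's real DagModel.
class Dag(Base):
    __tablename__ = 'dag'
    dag_id = Column(String(250), primary_key=True)
    is_paused = Column(Boolean)

# Bare negation of the column: the log above shows this was compiled to
# "NOT dag.is_paused", which SQL Server rejects with error 4145 because
# a BIT column by itself is not a boolean expression there.
bad_filter = or_(Dag.dag_id.is_(None), not_(Dag.is_paused))

# Explicit comparison: on SQL Server this compiles to "dag.is_paused = 0",
# a valid search condition on every backend.
good_filter = or_(Dag.dag_id.is_(None), Dag.is_paused == False)  # noqa: E712

print(bad_filter)
print(good_filter)
{code}

The same explicit-comparison style is needed anywhere a bare boolean column (or its negation) is used as a WHERE condition, since SQL Server has no native boolean type.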


