Posted to commits@airflow.apache.org by po...@apache.org on 2022/06/04 20:06:25 UTC

[airflow] branch main updated: Fix BigQuery system tests (#24013)

This is an automated email from the ASF dual-hosted git repository.

potiuk pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/airflow.git


The following commit(s) were added to refs/heads/main by this push:
     new c01a5a50b4 Fix BigQuery system tests (#24013)
c01a5a50b4 is described below

commit c01a5a50b44bf2cd0d980123549e3c8d5cfe7521
Author: Bartłomiej Hirsz <ba...@gmail.com>
AuthorDate: Sat Jun 4 22:06:07 2022 +0200

    Fix BigQuery system tests (#24013)
    
    * Change execution_date to logical_date in BigQueryInsertJobOperator job_id
    
    Change-Id: Ie1f3bba701169ceb2b39d693da320564de145c0c
    
    * Change jinja template path to relative path
    
    Change-Id: I6cced215124f69e9f4edf8ac08bb71d3ec3c8afc
    
    Co-authored-by: Bartlomiej Hirsz <ba...@google.com>
---
 airflow/providers/google/cloud/operators/bigquery.py               | 2 +-
 tests/providers/google/cloud/operators/test_bigquery.py            | 2 +-
 tests/system/providers/google/bigquery/example_bigquery_queries.py | 3 +--
 3 files changed, 3 insertions(+), 4 deletions(-)

diff --git a/airflow/providers/google/cloud/operators/bigquery.py b/airflow/providers/google/cloud/operators/bigquery.py
index a53b4bad05..2da02493b7 100644
--- a/airflow/providers/google/cloud/operators/bigquery.py
+++ b/airflow/providers/google/cloud/operators/bigquery.py
@@ -2130,7 +2130,7 @@ class BigQueryInsertJobOperator(BaseOperator):
         if self.job_id:
             return f"{self.job_id}_{uniqueness_suffix}"
 
-        exec_date = context['execution_date'].isoformat()
+        exec_date = context['logical_date'].isoformat()
         job_id = f"airflow_{self.dag_id}_{self.task_id}_{exec_date}_{uniqueness_suffix}"
         return re.sub(r"[:\-+.]", "_", job_id)
 
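For context, a minimal sketch of what the job_id built by the hunk above looks like once the 'logical_date' context key is used; the dag_id, task_id and uniqueness suffix below are illustrative stand-ins, not values taken from the commit:

    import re
    from datetime import datetime

    # Illustrative stand-ins; in the operator these come from the task instance.
    dag_id = "example_bigquery_queries"
    task_id = "insert_query_job"
    uniqueness_suffix = "d41d8cd9"  # hypothetical md5-derived suffix
    logical_date = datetime(2022, 6, 4, 22, 6).isoformat()  # "2022-06-04T22:06:00"

    job_id = f"airflow_{dag_id}_{task_id}_{logical_date}_{uniqueness_suffix}"
    # ':', '-', '+' and '.' are normalised to underscores, as in the re.sub above,
    # so the result is usable as a BigQuery job id.
    print(re.sub(r"[:\-+.]", "_", job_id))
    # airflow_example_bigquery_queries_insert_query_job_2022_06_04T22_06_00_d41d8cd9
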
diff --git a/tests/providers/google/cloud/operators/test_bigquery.py b/tests/providers/google/cloud/operators/test_bigquery.py
index 42f8794e8a..82dbbe3aef 100644
--- a/tests/providers/google/cloud/operators/test_bigquery.py
+++ b/tests/providers/google/cloud/operators/test_bigquery.py
@@ -1062,7 +1062,7 @@ class TestBigQueryInsertJobOperator:
     def test_job_id_validity(self, mock_md5, test_dag_id, expected_job_id):
         hash_ = "hash"
         mock_md5.return_value.hexdigest.return_value = hash_
-        context = {"execution_date": datetime(2020, 1, 23)}
+        context = {"logical_date": datetime(2020, 1, 23)}
         configuration = {
             "query": {
                 "query": "SELECT * FROM any",
diff --git a/tests/system/providers/google/bigquery/example_bigquery_queries.py b/tests/system/providers/google/bigquery/example_bigquery_queries.py
index be34df79ef..4efe8f2191 100644
--- a/tests/system/providers/google/bigquery/example_bigquery_queries.py
+++ b/tests/system/providers/google/bigquery/example_bigquery_queries.py
@@ -21,7 +21,6 @@ Example Airflow DAG for Google BigQuery service.
 """
 import os
 from datetime import datetime
-from pathlib import Path
 
 from airflow import models
 from airflow.operators.bash import BashOperator
@@ -40,7 +39,7 @@ from airflow.utils.trigger_rule import TriggerRule
 ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
 PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT")
 LOCATION = "us-east1"
-QUERY_SQL_PATH = str(Path(__file__).parent / "resources" / "example_bigquery_query.sql")
+QUERY_SQL_PATH = "resources/example_bigquery_query.sql"
 
 TABLE_1 = "table1"
 TABLE_2 = "table2"
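
For context, a minimal sketch of how the now-relative QUERY_SQL_PATH can be consumed, assuming the SQL file is pulled into the operator's Jinja-templated configuration via an include that resolves against the DAG file's folder; the DAG id, schedule and macro wiring here are illustrative, not taken from the commit:

    from datetime import datetime

    from airflow import models
    from airflow.providers.google.cloud.operators.bigquery import BigQueryInsertJobOperator

    # Relative to the folder that contains this DAG file.
    QUERY_SQL_PATH = "resources/example_bigquery_query.sql"

    with models.DAG(
        "example_relative_sql_include",  # hypothetical DAG id
        schedule_interval="@once",
        start_date=datetime(2022, 6, 1),
        catchup=False,
        # Expose the path to Jinja so it can be used inside {% include %}.
        user_defined_macros={"QUERY_SQL_PATH": QUERY_SQL_PATH},
    ) as dag:
        select_query_job = BigQueryInsertJobOperator(
            task_id="select_query_job",
            configuration={
                "query": {
                    # Rendered when the task runs; the relative include resolves
                    # against the DAG folder, so Path(__file__) is not needed.
                    "query": "{% include QUERY_SQL_PATH %}",
                    "useLegacySql": False,
                }
            },
        )

Because the configuration field is templated, the include is rendered from Airflow's template search path at run time, which is why the absolute Path(__file__) construction could be dropped in the diff above.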