You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by GitBox <gi...@apache.org> on 2022/10/25 07:32:51 UTC

[GitHub] [airflow] bhirsz commented on a diff in pull request #26858: Migrate Dataproc Metastore system tests according to AIP-47

bhirsz commented on code in PR #26858:
URL: https://github.com/apache/airflow/pull/26858#discussion_r1004107156


##########
tests/system/providers/google/cloud/dataproc_metastore/example_dataproc_metastore.py:
##########
@@ -23,36 +23,39 @@
 
 import datetime
 import os
+from pathlib import Path
 
-from google.cloud.metastore_v1 import MetadataImport
 from google.protobuf.field_mask_pb2 import FieldMask
 
 from airflow import models
-from airflow.models.baseoperator import chain
 from airflow.providers.google.cloud.operators.dataproc_metastore import (
-    DataprocMetastoreCreateBackupOperator,
     DataprocMetastoreCreateMetadataImportOperator,
     DataprocMetastoreCreateServiceOperator,
-    DataprocMetastoreDeleteBackupOperator,
     DataprocMetastoreDeleteServiceOperator,
     DataprocMetastoreExportMetadataOperator,
     DataprocMetastoreGetServiceOperator,
-    DataprocMetastoreListBackupsOperator,
-    DataprocMetastoreRestoreServiceOperator,
     DataprocMetastoreUpdateServiceOperator,
 )
+from airflow.providers.google.cloud.operators.gcs import GCSCreateBucketOperator, GCSDeleteBucketOperator
+from airflow.providers.google.cloud.transfers.local_to_gcs import LocalFilesystemToGCSOperator
+from airflow.utils.trigger_rule import TriggerRule
 
-PROJECT_ID = os.environ.get("GCP_PROJECT_ID", "<PROJECT_ID>")
-SERVICE_ID = os.environ.get("GCP_DATAPROC_METASTORE_SERVICE_ID", "dataproc-metastore-system-tests-service-1")
-BACKUP_ID = os.environ.get("GCP_DATAPROC_METASTORE_BACKUP_ID", "dataproc-metastore-system-tests-backup-1")
-REGION = os.environ.get("GCP_REGION", "<REGION>")
-BUCKET = os.environ.get("GCP_DATAPROC_METASTORE_BUCKET", "INVALID BUCKET NAME")
-METADATA_IMPORT_FILE = os.environ.get("GCS_METADATA_IMPORT_FILE", None)
-GCS_URI = os.environ.get("GCS_URI", f"gs://{BUCKET}/data/hive.sql")
-METADATA_IMPORT_ID = "dataproc-metastore-system-tests-metadata-import-1"
-TIMEOUT = 1200
+DAG_ID = "dataproc_metastore"
+PROJECT_ID = os.environ.get("SYSTEM_TESTS_GCP_PROJECT", "")
+ENV_ID = os.environ.get("SYSTEM_TESTS_ENV_ID")
+
+SERVICE_ID = f"{DAG_ID}-service-{ENV_ID}".replace('_', '-')
+METADATA_IMPORT_ID = f"{DAG_ID}-metadata-{ENV_ID}".replace('_', '-')
+
+REGION = "europe-west1"

Review Comment:
   We need to define a region for the operators so that it is part of the example.



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: commits-unsubscribe@airflow.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org