You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by cr...@apache.org on 2018/01/03 17:48:18 UTC
[18/35] incubator-airflow-site git commit: 1.9.0
http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_modules/bigquery_hook.html
----------------------------------------------------------------------
diff --git a/_modules/bigquery_hook.html b/_modules/bigquery_hook.html
index 1f8170a..e58a93b 100644
--- a/_modules/bigquery_hook.html
+++ b/_modules/bigquery_hook.html
@@ -13,6 +13,8 @@
+
+
@@ -30,6 +32,9 @@
+ <link rel="index" title="Index"
+ href="../genindex.html"/>
+ <link rel="search" title="Search" href="../search.html"/>
<link rel="top" title="Airflow Documentation" href="../index.html"/>
<link rel="up" title="Module code" href="index.html"/>
@@ -40,6 +45,7 @@
<body class="wy-body-for-nav" role="document">
+
<div class="wy-grid-for-nav">
@@ -76,7 +82,10 @@
- <ul>
+
+
+
+ <ul>
<li class="toctree-l1"><a class="reference internal" href="../project.html">Project</a></li>
<li class="toctree-l1"><a class="reference internal" href="../license.html">License</a></li>
<li class="toctree-l1"><a class="reference internal" href="../start.html">Quick Start</a></li>
@@ -90,6 +99,8 @@
<li class="toctree-l1"><a class="reference internal" href="../scheduler.html">Scheduling & Triggers</a></li>
<li class="toctree-l1"><a class="reference internal" href="../plugins.html">Plugins</a></li>
<li class="toctree-l1"><a class="reference internal" href="../security.html">Security</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../api.html">Experimental Rest API</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../integration.html">Integration</a></li>
<li class="toctree-l1"><a class="reference internal" href="../faq.html">FAQ</a></li>
<li class="toctree-l1"><a class="reference internal" href="../code.html">API Reference</a></li>
</ul>
@@ -104,8 +115,10 @@
<nav class="wy-nav-top" role="navigation" aria-label="top navigation">
- <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
- <a href="../index.html">Airflow</a>
+
+ <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+ <a href="../index.html">Airflow</a>
+
</nav>
@@ -118,19 +131,36 @@
+
+
+
+
+
+
+
+
+
+
<div role="navigation" aria-label="breadcrumbs navigation">
+
<ul class="wy-breadcrumbs">
- <li><a href="../index.html">Docs</a> »</li>
-
+
+ <li><a href="../index.html">Docs</a> »</li>
+
<li><a href="index.html">Module code</a> »</li>
-
- <li>bigquery_hook</li>
+
+ <li>bigquery_hook</li>
+
+
<li class="wy-breadcrumbs-aside">
-
+
</li>
+
</ul>
+
+
<hr/>
</div>
<div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
@@ -157,25 +187,24 @@
<span class="sd">implementation for BigQuery.</span>
<span class="sd">"""</span>
-<span class="kn">from</span> <span class="nn">builtins</span> <span class="kn">import</span> <span class="nb">range</span>
-<span class="kn">from</span> <span class="nn">past.builtins</span> <span class="kn">import</span> <span class="nb">basestring</span>
-
-<span class="kn">import</span> <span class="nn">logging</span>
<span class="kn">import</span> <span class="nn">time</span>
-<span class="kn">from</span> <span class="nn">airflow.contrib.hooks.gcp_api_base_hook</span> <span class="kn">import</span> <span class="n">GoogleCloudBaseHook</span>
-<span class="kn">from</span> <span class="nn">airflow.hooks.dbapi_hook</span> <span class="kn">import</span> <span class="n">DbApiHook</span>
-<span class="kn">from</span> <span class="nn">apiclient.discovery</span> <span class="kn">import</span> <span class="n">build</span>
-<span class="kn">from</span> <span class="nn">pandas.io.gbq</span> <span class="kn">import</span> <span class="n">GbqConnector</span><span class="p">,</span> \
+<span class="kn">from</span> <span class="nn">apiclient.discovery</span> <span class="k">import</span> <span class="n">build</span><span class="p">,</span> <span class="n">HttpError</span>
+<span class="kn">from</span> <span class="nn">googleapiclient</span> <span class="k">import</span> <span class="n">errors</span>
+<span class="kn">from</span> <span class="nn">builtins</span> <span class="k">import</span> <span class="nb">range</span>
+<span class="kn">from</span> <span class="nn">pandas_gbq.gbq</span> <span class="k">import</span> <span class="n">GbqConnector</span><span class="p">,</span> \
<span class="n">_parse_data</span> <span class="k">as</span> <span class="n">gbq_parse_data</span><span class="p">,</span> \
<span class="n">_check_google_client_version</span> <span class="k">as</span> <span class="n">gbq_check_google_client_version</span><span class="p">,</span> \
<span class="n">_test_google_api_imports</span> <span class="k">as</span> <span class="n">gbq_test_google_api_imports</span>
-<span class="kn">from</span> <span class="nn">pandas.tools.merge</span> <span class="kn">import</span> <span class="n">concat</span>
+<span class="kn">from</span> <span class="nn">pandas.tools.merge</span> <span class="k">import</span> <span class="n">concat</span>
+<span class="kn">from</span> <span class="nn">past.builtins</span> <span class="k">import</span> <span class="n">basestring</span>
-<span class="n">logging</span><span class="o">.</span><span class="n">getLogger</span><span class="p">(</span><span class="s2">"bigquery"</span><span class="p">)</span><span class="o">.</span><span class="n">setLevel</span><span class="p">(</span><span class="n">logging</span><span class="o">.</span><span class="n">INFO</span><span class="p">)</span>
+<span class="kn">from</span> <span class="nn">airflow.contrib.hooks.gcp_api_base_hook</span> <span class="k">import</span> <span class="n">GoogleCloudBaseHook</span>
+<span class="kn">from</span> <span class="nn">airflow.hooks.dbapi_hook</span> <span class="k">import</span> <span class="n">DbApiHook</span>
+<span class="kn">from</span> <span class="nn">airflow.utils.log.logging_mixin</span> <span class="k">import</span> <span class="n">LoggingMixin</span>
-<div class="viewcode-block" id="BigQueryHook"><a class="viewcode-back" href="../code.html#airflow.contrib.hooks.BigQueryHook">[docs]</a><span class="k">class</span> <span class="nc">BigQueryHook</span><span class="p">(</span><span class="n">GoogleCloudBaseHook</span><span class="p">,</span> <span class="n">DbApiHook</span><span class="p">):</span>
+<div class="viewcode-block" id="BigQueryHook"><a class="viewcode-back" href="../code.html#airflow.contrib.hooks.BigQueryHook">[docs]</a><span class="k">class</span> <span class="nc">BigQueryHook</span><span class="p">(</span><span class="n">GoogleCloudBaseHook</span><span class="p">,</span> <span class="n">DbApiHook</span><span class="p">,</span> <span class="n">LoggingMixin</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Interact with BigQuery. This hook uses the Google Cloud Platform</span>
<span class="sd"> connection.</span>
@@ -184,8 +213,8 @@
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span>
<span class="n">bigquery_conn_id</span><span class="o">=</span><span class="s1">'bigquery_default'</span><span class="p">,</span>
- <span class="n">delegate_to</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
- <span class="nb">super</span><span class="p">(</span><span class="n">BigQueryHook</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">__init__</span><span class="p">(</span>
+ <span class="n">delegate_to</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
+ <span class="nb">super</span><span class="p">(</span><span class="n">BigQueryHook</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span>
<span class="n">conn_id</span><span class="o">=</span><span class="n">bigquery_conn_id</span><span class="p">,</span>
<span class="n">delegate_to</span><span class="o">=</span><span class="n">delegate_to</span><span class="p">)</span>
@@ -204,7 +233,7 @@
<span class="n">http_authorized</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_authorize</span><span class="p">()</span>
<span class="k">return</span> <span class="n">build</span><span class="p">(</span><span class="s1">'bigquery'</span><span class="p">,</span> <span class="s1">'v2'</span><span class="p">,</span> <span class="n">http</span><span class="o">=</span><span class="n">http_authorized</span><span class="p">)</span></div>
-<div class="viewcode-block" id="BigQueryHook.insert_rows"><a class="viewcode-back" href="../code.html#airflow.contrib.hooks.BigQueryHook.insert_rows">[docs]</a> <span class="k">def</span> <span class="nf">insert_rows</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">rows</span><span class="p">,</span> <span class="n">target_fields</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="n">commit_every</span><span class="o">=</span><span class="mi">1000</span><span class="p">):</span>
+<div class="viewcode-block" id="BigQueryHook.insert_rows"><a class="viewcode-back" href="../code.html#airflow.contrib.hooks.BigQueryHook.insert_rows">[docs]</a> <span class="k">def</span> <span class="nf">insert_rows</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">rows</span><span class="p">,</span> <span class="n">target_fields</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">commit_every</span><span class="o">=</span><span class="mi">1000</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Insertion is currently unsupported. Theoretically, you could use</span>
<span class="sd"> BigQuery's streaming API to insert rows into a table, but this hasn't</span>
@@ -212,7 +241,7 @@
<span class="sd"> """</span>
<span class="k">raise</span> <span class="ne">NotImplementedError</span><span class="p">()</span></div>
-<div class="viewcode-block" id="BigQueryHook.get_pandas_df"><a class="viewcode-back" href="../code.html#airflow.contrib.hooks.BigQueryHook.get_pandas_df">[docs]</a> <span class="k">def</span> <span class="nf">get_pandas_df</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">bql</span><span class="p">,</span> <span class="n">parameters</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+<div class="viewcode-block" id="BigQueryHook.get_pandas_df"><a class="viewcode-back" href="../code.html#airflow.contrib.hooks.BigQueryHook.get_pandas_df">[docs]</a> <span class="k">def</span> <span class="nf">get_pandas_df</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">bql</span><span class="p">,</span> <span class="n">parameters</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">dialect</span><span class="o">=</span><span class="s1">'legacy'</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Returns a Pandas DataFrame for the results produced by a BigQuery</span>
<span class="sd"> query. The DbApiHook method must be overridden because Pandas</span>
@@ -223,10 +252,14 @@
<span class="sd"> :param bql: The BigQuery SQL to execute.</span>
<span class="sd"> :type bql: string</span>
+<span class="sd"> :param parameters: The parameters to render the SQL query with (not used, leave to override superclass method)</span>
+<span class="sd"> :type parameters: mapping or iterable</span>
+<span class="sd"> :param dialect: Dialect of BigQuery SQL – legacy SQL or standard SQL</span>
+<span class="sd"> :type dialect: string in {'legacy', 'standard'}, default 'legacy'</span>
<span class="sd"> """</span>
<span class="n">service</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_service</span><span class="p">()</span>
<span class="n">project</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_get_field</span><span class="p">(</span><span class="s1">'project'</span><span class="p">)</span>
- <span class="n">connector</span> <span class="o">=</span> <span class="n">BigQueryPandasConnector</span><span class="p">(</span><span class="n">project</span><span class="p">,</span> <span class="n">service</span><span class="p">)</span>
+ <span class="n">connector</span> <span class="o">=</span> <span class="n">BigQueryPandasConnector</span><span class="p">(</span><span class="n">project</span><span class="p">,</span> <span class="n">service</span><span class="p">,</span> <span class="n">dialect</span><span class="o">=</span><span class="n">dialect</span><span class="p">)</span>
<span class="n">schema</span><span class="p">,</span> <span class="n">pages</span> <span class="o">=</span> <span class="n">connector</span><span class="o">.</span><span class="n">run_query</span><span class="p">(</span><span class="n">bql</span><span class="p">)</span>
<span class="n">dataframe_list</span> <span class="o">=</span> <span class="p">[]</span>
@@ -235,9 +268,35 @@
<span class="n">dataframe_list</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">gbq_parse_data</span><span class="p">(</span><span class="n">schema</span><span class="p">,</span> <span class="n">page</span><span class="p">))</span>
<span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">dataframe_list</span><span class="p">)</span> <span class="o">></span> <span class="mi">0</span><span class="p">:</span>
- <span class="k">return</span> <span class="n">concat</span><span class="p">(</span><span class="n">dataframe_list</span><span class="p">,</span> <span class="n">ignore_index</span><span class="o">=</span><span class="bp">True</span><span class="p">)</span>
+ <span class="k">return</span> <span class="n">concat</span><span class="p">(</span><span class="n">dataframe_list</span><span class="p">,</span> <span class="n">ignore_index</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
<span class="k">else</span><span class="p">:</span>
- <span class="k">return</span> <span class="n">gbq_parse_data</span><span class="p">(</span><span class="n">schema</span><span class="p">,</span> <span class="p">[])</span></div></div>
+ <span class="k">return</span> <span class="n">gbq_parse_data</span><span class="p">(</span><span class="n">schema</span><span class="p">,</span> <span class="p">[])</span></div>
+
+<div class="viewcode-block" id="BigQueryHook.table_exists"><a class="viewcode-back" href="../code.html#airflow.contrib.hooks.BigQueryHook.table_exists">[docs]</a> <span class="k">def</span> <span class="nf">table_exists</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">project_id</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_id</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Checks for the existence of a table in Google BigQuery.</span>
+
+<span class="sd"> :param project_id: The Google cloud project in which to look for the table. The connection supplied to the hook</span>
+<span class="sd"> must provide access to the specified project.</span>
+<span class="sd"> :type project_id: string</span>
+<span class="sd"> :param dataset_id: The name of the dataset in which to look for the table.</span>
+<span class="sd"> :type dataset_id: string</span>
+<span class="sd"> :param table_id: The name of the table to check the existence of.</span>
+<span class="sd"> :type table_id: string</span>
+<span class="sd"> """</span>
+ <span class="n">service</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_service</span><span class="p">()</span>
+ <span class="k">try</span><span class="p">:</span>
+ <span class="n">service</span><span class="o">.</span><span class="n">tables</span><span class="p">()</span><span class="o">.</span><span class="n">get</span><span class="p">(</span>
+ <span class="n">projectId</span><span class="o">=</span><span class="n">project_id</span><span class="p">,</span>
+ <span class="n">datasetId</span><span class="o">=</span><span class="n">dataset_id</span><span class="p">,</span>
+ <span class="n">tableId</span><span class="o">=</span><span class="n">table_id</span>
+ <span class="p">)</span><span class="o">.</span><span class="n">execute</span><span class="p">()</span>
+ <span class="k">return</span> <span class="kc">True</span>
+ <span class="k">except</span> <span class="n">errors</span><span class="o">.</span><span class="n">HttpError</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
+ <span class="k">if</span> <span class="n">e</span><span class="o">.</span><span class="n">resp</span><span class="p">[</span><span class="s1">'status'</span><span class="p">]</span> <span class="o">==</span> <span class="s1">'404'</span><span class="p">:</span>
+ <span class="k">return</span> <span class="kc">False</span>
+ <span class="k">raise</span></div></div>
<span class="k">class</span> <span class="nc">BigQueryPandasConnector</span><span class="p">(</span><span class="n">GbqConnector</span><span class="p">):</span>
@@ -248,13 +307,14 @@
<span class="sd"> without forcing a three legged OAuth connection. Instead, we can inject</span>
<span class="sd"> service account credentials into the binding.</span>
<span class="sd"> """</span>
- <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">project_id</span><span class="p">,</span> <span class="n">service</span><span class="p">,</span> <span class="n">reauth</span><span class="o">=</span><span class="bp">False</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="bp">False</span><span class="p">):</span>
+ <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">project_id</span><span class="p">,</span> <span class="n">service</span><span class="p">,</span> <span class="n">reauth</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span> <span class="n">dialect</span><span class="o">=</span><span class="s1">'legacy'</span><span class="p">):</span>
<span class="n">gbq_check_google_client_version</span><span class="p">()</span>
<span class="n">gbq_test_google_api_imports</span><span class="p">()</span>
<span class="bp">self</span><span class="o">.</span><span class="n">project_id</span> <span class="o">=</span> <span class="n">project_id</span>
<span class="bp">self</span><span class="o">.</span><span class="n">reauth</span> <span class="o">=</span> <span class="n">reauth</span>
<span class="bp">self</span><span class="o">.</span><span class="n">service</span> <span class="o">=</span> <span class="n">service</span>
<span class="bp">self</span><span class="o">.</span><span class="n">verbose</span> <span class="o">=</span> <span class="n">verbose</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">dialect</span> <span class="o">=</span> <span class="n">dialect</span>
<span class="k">class</span> <span class="nc">BigQueryConnection</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
@@ -285,22 +345,25 @@
<span class="s2">"BigQueryConnection does not have transactions"</span><span class="p">)</span>
-<span class="k">class</span> <span class="nc">BigQueryBaseCursor</span><span class="p">(</span><span class="nb">object</span><span class="p">):</span>
+<span class="k">class</span> <span class="nc">BigQueryBaseCursor</span><span class="p">(</span><span class="n">LoggingMixin</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> The BigQuery base cursor contains helper methods to execute queries against</span>
<span class="sd"> BigQuery. The methods can be used directly by operators, in cases where a</span>
<span class="sd"> PEP 249 cursor isn't needed.</span>
<span class="sd"> """</span>
-
<span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">service</span><span class="p">,</span> <span class="n">project_id</span><span class="p">):</span>
<span class="bp">self</span><span class="o">.</span><span class="n">service</span> <span class="o">=</span> <span class="n">service</span>
<span class="bp">self</span><span class="o">.</span><span class="n">project_id</span> <span class="o">=</span> <span class="n">project_id</span>
<span class="k">def</span> <span class="nf">run_query</span><span class="p">(</span>
- <span class="bp">self</span><span class="p">,</span> <span class="n">bql</span><span class="p">,</span> <span class="n">destination_dataset_table</span> <span class="o">=</span> <span class="bp">False</span><span class="p">,</span>
+ <span class="bp">self</span><span class="p">,</span> <span class="n">bql</span><span class="p">,</span> <span class="n">destination_dataset_table</span> <span class="o">=</span> <span class="kc">False</span><span class="p">,</span>
<span class="n">write_disposition</span> <span class="o">=</span> <span class="s1">'WRITE_EMPTY'</span><span class="p">,</span>
- <span class="n">allow_large_results</span><span class="o">=</span><span class="bp">False</span><span class="p">,</span>
- <span class="n">udf_config</span> <span class="o">=</span> <span class="bp">False</span><span class="p">):</span>
+ <span class="n">allow_large_results</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
+ <span class="n">udf_config</span> <span class="o">=</span> <span class="kc">False</span><span class="p">,</span>
+ <span class="n">use_legacy_sql</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
+ <span class="n">maximum_billing_tier</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">create_disposition</span><span class="o">=</span><span class="s1">'CREATE_IF_NEEDED'</span><span class="p">,</span>
+ <span class="n">query_params</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Executes a BigQuery SQL query. Optionally persists results in a BigQuery</span>
<span class="sd"> table. See here:</span>
@@ -315,29 +378,40 @@
<span class="sd"> BigQuery table to save the query results.</span>
<span class="sd"> :param write_disposition: What to do if the table already exists in</span>
<span class="sd"> BigQuery.</span>
+<span class="sd"> :type write_disposition: string</span>
+<span class="sd"> :param create_disposition: Specifies whether the job is allowed to create new tables.</span>
+<span class="sd"> :type create_disposition: string</span>
<span class="sd"> :param allow_large_results: Whether to allow large results.</span>
<span class="sd"> :type allow_large_results: boolean</span>
<span class="sd"> :param udf_config: The User Defined Function configuration for the query.</span>
<span class="sd"> See https://cloud.google.com/bigquery/user-defined-functions for details.</span>
<span class="sd"> :type udf_config: list</span>
+<span class="sd"> :param use_legacy_sql: Whether to use legacy SQL (true) or standard SQL (false).</span>
+<span class="sd"> :type use_legacy_sql: boolean</span>
+<span class="sd"> :param maximum_billing_tier: Positive integer that serves as a multiplier of the basic price.</span>
+<span class="sd"> :type maximum_billing_tier: integer</span>
<span class="sd"> """</span>
<span class="n">configuration</span> <span class="o">=</span> <span class="p">{</span>
<span class="s1">'query'</span><span class="p">:</span> <span class="p">{</span>
<span class="s1">'query'</span><span class="p">:</span> <span class="n">bql</span><span class="p">,</span>
+ <span class="s1">'useLegacySql'</span><span class="p">:</span> <span class="n">use_legacy_sql</span><span class="p">,</span>
+ <span class="s1">'maximumBillingTier'</span><span class="p">:</span> <span class="n">maximum_billing_tier</span>
<span class="p">}</span>
<span class="p">}</span>
<span class="k">if</span> <span class="n">destination_dataset_table</span><span class="p">:</span>
<span class="k">assert</span> <span class="s1">'.'</span> <span class="ow">in</span> <span class="n">destination_dataset_table</span><span class="p">,</span> <span class="p">(</span>
<span class="s1">'Expected destination_dataset_table in the format of '</span>
- <span class="s1">'<dataset>.<table>. Got: {}'</span><span class="p">)</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">destination_dataset_table</span><span class="p">)</span>
- <span class="n">destination_dataset</span><span class="p">,</span> <span class="n">destination_table</span> <span class="o">=</span> \
- <span class="n">destination_dataset_table</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
+ <span class="s1">'<dataset>.<table>. Got: </span><span class="si">{}</span><span class="s1">'</span><span class="p">)</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">destination_dataset_table</span><span class="p">)</span>
+ <span class="n">destination_project</span><span class="p">,</span> <span class="n">destination_dataset</span><span class="p">,</span> <span class="n">destination_table</span> <span class="o">=</span> \
+ <span class="n">_split_tablename</span><span class="p">(</span><span class="n">table_input</span><span class="o">=</span><span class="n">destination_dataset_table</span><span class="p">,</span>
+ <span class="n">default_project_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">)</span>
<span class="n">configuration</span><span class="p">[</span><span class="s1">'query'</span><span class="p">]</span><span class="o">.</span><span class="n">update</span><span class="p">({</span>
<span class="s1">'allowLargeResults'</span><span class="p">:</span> <span class="n">allow_large_results</span><span class="p">,</span>
<span class="s1">'writeDisposition'</span><span class="p">:</span> <span class="n">write_disposition</span><span class="p">,</span>
+ <span class="s1">'createDisposition'</span><span class="p">:</span> <span class="n">create_disposition</span><span class="p">,</span>
<span class="s1">'destinationTable'</span><span class="p">:</span> <span class="p">{</span>
- <span class="s1">'projectId'</span><span class="p">:</span> <span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span>
+ <span class="s1">'projectId'</span><span class="p">:</span> <span class="n">destination_project</span><span class="p">,</span>
<span class="s1">'datasetId'</span><span class="p">:</span> <span class="n">destination_dataset</span><span class="p">,</span>
<span class="s1">'tableId'</span><span class="p">:</span> <span class="n">destination_table</span><span class="p">,</span>
<span class="p">}</span>
@@ -348,12 +422,15 @@
<span class="s1">'userDefinedFunctionResources'</span><span class="p">:</span> <span class="n">udf_config</span>
<span class="p">})</span>
+ <span class="k">if</span> <span class="n">query_params</span><span class="p">:</span>
+ <span class="n">configuration</span><span class="p">[</span><span class="s1">'query'</span><span class="p">][</span><span class="s1">'queryParameters'</span><span class="p">]</span> <span class="o">=</span> <span class="n">query_params</span>
+
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">run_with_configuration</span><span class="p">(</span><span class="n">configuration</span><span class="p">)</span>
<span class="k">def</span> <span class="nf">run_extract</span><span class="p">(</span> <span class="c1"># noqa</span>
<span class="bp">self</span><span class="p">,</span> <span class="n">source_project_dataset_table</span><span class="p">,</span> <span class="n">destination_cloud_storage_uris</span><span class="p">,</span>
<span class="n">compression</span><span class="o">=</span><span class="s1">'NONE'</span><span class="p">,</span> <span class="n">export_format</span><span class="o">=</span><span class="s1">'CSV'</span><span class="p">,</span> <span class="n">field_delimiter</span><span class="o">=</span><span class="s1">','</span><span class="p">,</span>
- <span class="n">print_header</span><span class="o">=</span><span class="bp">True</span><span class="p">):</span>
+ <span class="n">print_header</span><span class="o">=</span><span class="kc">True</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Executes a BigQuery extract command to copy data from BigQuery to</span>
<span class="sd"> Google Cloud Storage. See here:</span>
@@ -379,9 +456,12 @@
<span class="sd"> :param print_header: Whether to print a header for a CSV file extract.</span>
<span class="sd"> :type print_header: boolean</span>
<span class="sd"> """</span>
+
<span class="n">source_project</span><span class="p">,</span> <span class="n">source_dataset</span><span class="p">,</span> <span class="n">source_table</span> <span class="o">=</span> \
- <span class="bp">self</span><span class="o">.</span><span class="n">_split_project_dataset_table_input</span><span class="p">(</span>
- <span class="s1">'source_project_dataset_table'</span><span class="p">,</span> <span class="n">source_project_dataset_table</span><span class="p">)</span>
+ <span class="n">_split_tablename</span><span class="p">(</span><span class="n">table_input</span><span class="o">=</span><span class="n">source_project_dataset_table</span><span class="p">,</span>
+ <span class="n">default_project_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span>
+ <span class="n">var_name</span><span class="o">=</span><span class="s1">'source_project_dataset_table'</span><span class="p">)</span>
+
<span class="n">configuration</span> <span class="o">=</span> <span class="p">{</span>
<span class="s1">'extract'</span><span class="p">:</span> <span class="p">{</span>
<span class="s1">'sourceTable'</span><span class="p">:</span> <span class="p">{</span>
@@ -418,14 +498,14 @@
<span class="sd"> For more details about these parameters.</span>
<span class="sd"> :param source_project_dataset_tables: One or more dotted</span>
-<span class="sd"> (<project>.)<dataset>.<table></span>
+<span class="sd"> (project:|project.)<dataset>.<table></span>
<span class="sd"> BigQuery tables to use as the source data. Use a list if there are</span>
<span class="sd"> multiple source tables.</span>
<span class="sd"> If <project> is not included, project will be the project defined</span>
<span class="sd"> in the connection json.</span>
<span class="sd"> :type source_project_dataset_tables: list|string</span>
<span class="sd"> :param destination_project_dataset_table: The destination BigQuery</span>
-<span class="sd"> table. Format is: <project>.<dataset>.<table></span>
+<span class="sd"> table. Format is: (project:|project.)<dataset>.<table></span>
<span class="sd"> :type destination_project_dataset_table: string</span>
<span class="sd"> :param write_disposition: The write disposition if the table already exists.</span>
<span class="sd"> :type write_disposition: string</span>
@@ -440,21 +520,18 @@
<span class="n">source_project_dataset_tables_fixup</span> <span class="o">=</span> <span class="p">[]</span>
<span class="k">for</span> <span class="n">source_project_dataset_table</span> <span class="ow">in</span> <span class="n">source_project_dataset_tables</span><span class="p">:</span>
<span class="n">source_project</span><span class="p">,</span> <span class="n">source_dataset</span><span class="p">,</span> <span class="n">source_table</span> <span class="o">=</span> \
- <span class="bp">self</span><span class="o">.</span><span class="n">_split_project_dataset_table_input</span><span class="p">(</span>
- <span class="s1">'source_project_dataset_table'</span><span class="p">,</span> <span class="n">source_project_dataset_table</span><span class="p">)</span>
+ <span class="n">_split_tablename</span><span class="p">(</span><span class="n">table_input</span><span class="o">=</span><span class="n">source_project_dataset_table</span><span class="p">,</span>
+ <span class="n">default_project_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span>
+ <span class="n">var_name</span><span class="o">=</span><span class="s1">'source_project_dataset_table'</span><span class="p">)</span>
<span class="n">source_project_dataset_tables_fixup</span><span class="o">.</span><span class="n">append</span><span class="p">({</span>
<span class="s1">'projectId'</span><span class="p">:</span> <span class="n">source_project</span><span class="p">,</span>
<span class="s1">'datasetId'</span><span class="p">:</span> <span class="n">source_dataset</span><span class="p">,</span>
<span class="s1">'tableId'</span><span class="p">:</span> <span class="n">source_table</span>
<span class="p">})</span>
- <span class="k">assert</span> <span class="mi">3</span> <span class="o">==</span> <span class="nb">len</span><span class="p">(</span><span class="n">destination_project_dataset_table</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">)),</span> <span class="p">(</span>
- <span class="s1">'Expected destination_project_dataset_table in the format of '</span>
- <span class="s1">'<project>.<dataset>.<table>. '</span>
- <span class="s1">'Got: {}'</span><span class="p">)</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">destination_project_dataset_table</span><span class="p">)</span>
-
<span class="n">destination_project</span><span class="p">,</span> <span class="n">destination_dataset</span><span class="p">,</span> <span class="n">destination_table</span> <span class="o">=</span> \
- <span class="n">destination_project_dataset_table</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">,</span> <span class="mi">2</span><span class="p">)</span>
+ <span class="n">_split_tablename</span><span class="p">(</span><span class="n">table_input</span><span class="o">=</span><span class="n">destination_project_dataset_table</span><span class="p">,</span>
+ <span class="n">default_project_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">)</span>
<span class="n">configuration</span> <span class="o">=</span> <span class="p">{</span>
<span class="s1">'copy'</span><span class="p">:</span> <span class="p">{</span>
<span class="s1">'createDisposition'</span><span class="p">:</span> <span class="n">create_disposition</span><span class="p">,</span>
@@ -477,7 +554,13 @@
<span class="n">create_disposition</span><span class="o">=</span><span class="s1">'CREATE_IF_NEEDED'</span><span class="p">,</span>
<span class="n">skip_leading_rows</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span>
<span class="n">write_disposition</span><span class="o">=</span><span class="s1">'WRITE_EMPTY'</span><span class="p">,</span>
- <span class="n">field_delimiter</span><span class="o">=</span><span class="s1">','</span><span class="p">):</span>
+ <span class="n">field_delimiter</span><span class="o">=</span><span class="s1">','</span><span class="p">,</span>
+ <span class="n">max_bad_records</span><span class="o">=</span><span class="mi">0</span><span class="p">,</span>
+ <span class="n">quote_character</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">allow_quoted_newlines</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
+ <span class="n">allow_jagged_rows</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
+ <span class="n">schema_update_options</span><span class="o">=</span><span class="p">(),</span>
+ <span class="n">src_fmt_configs</span><span class="o">=</span><span class="p">{}):</span>
<span class="sd">"""</span>
<span class="sd"> Executes a BigQuery load command to load data from Google Cloud Storage</span>
<span class="sd"> to BigQuery. See here:</span>
@@ -487,9 +570,9 @@
<span class="sd"> For more details about these parameters.</span>
<span class="sd"> :param destination_project_dataset_table:</span>
-<span class="sd"> The dotted (<project>.)<dataset>.<table> BigQuery table to load data into.</span>
-<span class="sd"> If <project> is not included, project will be the project defined in</span>
-<span class="sd"> the connection json.</span>
+<span class="sd"> The dotted (<project>.|<project>:)<dataset>.<table> BigQuery table to load</span>
+<span class="sd"> data into. If <project> is not included, project will be the project defined</span>
+<span class="sd"> in the connection json.</span>
<span class="sd"> :type destination_project_dataset_table: string</span>
<span class="sd"> :param schema_fields: The schema field list as defined here:</span>
<span class="sd"> https://cloud.google.com/bigquery/docs/reference/v2/jobs#configuration.load</span>
@@ -508,10 +591,56 @@
<span class="sd"> :type write_disposition: string</span>
<span class="sd"> :param field_delimiter: The delimiter to use when loading from a CSV.</span>
<span class="sd"> :type field_delimiter: string</span>
+<span class="sd"> :param max_bad_records: The maximum number of bad records that BigQuery can</span>
+<span class="sd"> ignore when running the job.</span>
+<span class="sd"> :type max_bad_records: int</span>
+<span class="sd"> :param quote_character: The value that is used to quote data sections in a CSV file.</span>
+<span class="sd"> :type quote_character: string</span>
+<span class="sd"> :param allow_quoted_newlines: Whether to allow quoted newlines (true) or not (false).</span>
+<span class="sd"> :type allow_quoted_newlines: boolean</span>
+<span class="sd"> :param allow_jagged_rows: Accept rows that are missing trailing optional columns.</span>
+<span class="sd"> The missing values are treated as nulls. If false, records with missing trailing columns</span>
+<span class="sd"> are treated as bad records, and if there are too many bad records, an invalid error is</span>
+<span class="sd"> returned in the job result. Only applicable when soure_format is CSV.</span>
+<span class="sd"> :type allow_jagged_rows: bool</span>
+<span class="sd"> :param schema_update_options: Allows the schema of the desitination</span>
+<span class="sd"> table to be updated as a side effect of the load job.</span>
+<span class="sd"> :type schema_update_options: list</span>
+<span class="sd"> :param src_fmt_configs: configure optional fields specific to the source format</span>
+<span class="sd"> :type src_fmt_configs: dict</span>
<span class="sd"> """</span>
+
+ <span class="c1"># bigquery only allows certain source formats</span>
+ <span class="c1"># we check to make sure the passed source format is valid</span>
+ <span class="c1"># if it's not, we raise a ValueError</span>
+ <span class="c1"># Refer to this link for more details:</span>
+ <span class="c1"># https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.query.tableDefinitions.(key).sourceFormat</span>
+ <span class="n">source_format</span> <span class="o">=</span> <span class="n">source_format</span><span class="o">.</span><span class="n">upper</span><span class="p">()</span>
+ <span class="n">allowed_formats</span> <span class="o">=</span> <span class="p">[</span><span class="s2">"CSV"</span><span class="p">,</span> <span class="s2">"NEWLINE_DELIMITED_JSON"</span><span class="p">,</span> <span class="s2">"AVRO"</span><span class="p">,</span> <span class="s2">"GOOGLE_SHEETS"</span><span class="p">,</span> <span class="s2">"DATASTORE_BACKUP"</span><span class="p">]</span>
+ <span class="k">if</span> <span class="n">source_format</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">allowed_formats</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">"</span><span class="si">{0}</span><span class="s2"> is not a valid source format. "</span>
+ <span class="s2">"Please use one of the following types: </span><span class="si">{1}</span><span class="s2">"</span>
+ <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">source_format</span><span class="p">,</span> <span class="n">allowed_formats</span><span class="p">))</span>
+
+ <span class="c1"># bigquery also allows you to define how you want a table's schema to change</span>
+ <span class="c1"># as a side effect of a load</span>
+ <span class="c1"># for more details:</span>
+ <span class="c1"># https://cloud.google.com/bigquery/docs/reference/rest/v2/jobs#configuration.load.schemaUpdateOptions</span>
+ <span class="n">allowed_schema_update_options</span> <span class="o">=</span> <span class="p">[</span>
+ <span class="s1">'ALLOW_FIELD_ADDITION'</span><span class="p">,</span>
+ <span class="s2">"ALLOW_FIELD_RELAXATION"</span>
+ <span class="p">]</span>
+ <span class="k">if</span> <span class="ow">not</span> <span class="nb">set</span><span class="p">(</span><span class="n">allowed_schema_update_options</span><span class="p">)</span><span class="o">.</span><span class="n">issuperset</span><span class="p">(</span><span class="nb">set</span><span class="p">(</span><span class="n">schema_update_options</span><span class="p">)):</span>
+ <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span>
+ <span class="s2">"</span><span class="si">{0}</span><span class="s2"> contains invalid schema update options. "</span>
+ <span class="s2">"Please only use one or more of the following options: </span><span class="si">{1}</span><span class="s2">"</span>
+ <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">schema_update_options</span><span class="p">,</span> <span class="n">allowed_schema_update_options</span><span class="p">)</span>
+ <span class="p">)</span>
+
<span class="n">destination_project</span><span class="p">,</span> <span class="n">destination_dataset</span><span class="p">,</span> <span class="n">destination_table</span> <span class="o">=</span> \
- <span class="bp">self</span><span class="o">.</span><span class="n">_split_project_dataset_table_input</span><span class="p">(</span>
- <span class="s1">'destination_project_dataset_table'</span><span class="p">,</span> <span class="n">destination_project_dataset_table</span><span class="p">)</span>
+ <span class="n">_split_tablename</span><span class="p">(</span><span class="n">table_input</span><span class="o">=</span><span class="n">destination_project_dataset_table</span><span class="p">,</span>
+ <span class="n">default_project_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span>
+ <span class="n">var_name</span><span class="o">=</span><span class="s1">'destination_project_dataset_table'</span><span class="p">)</span>
<span class="n">configuration</span> <span class="o">=</span> <span class="p">{</span>
<span class="s1">'load'</span><span class="p">:</span> <span class="p">{</span>
@@ -521,43 +650,62 @@
<span class="s1">'datasetId'</span><span class="p">:</span> <span class="n">destination_dataset</span><span class="p">,</span>
<span class="s1">'tableId'</span><span class="p">:</span> <span class="n">destination_table</span><span class="p">,</span>
<span class="p">},</span>
- <span class="s1">'schema'</span><span class="p">:</span> <span class="p">{</span>
- <span class="s1">'fields'</span><span class="p">:</span> <span class="n">schema_fields</span>
- <span class="p">},</span>
<span class="s1">'sourceFormat'</span><span class="p">:</span> <span class="n">source_format</span><span class="p">,</span>
<span class="s1">'sourceUris'</span><span class="p">:</span> <span class="n">source_uris</span><span class="p">,</span>
<span class="s1">'writeDisposition'</span><span class="p">:</span> <span class="n">write_disposition</span><span class="p">,</span>
<span class="p">}</span>
<span class="p">}</span>
+ <span class="k">if</span> <span class="n">schema_fields</span><span class="p">:</span>
+ <span class="n">configuration</span><span class="p">[</span><span class="s1">'load'</span><span class="p">][</span><span class="s1">'schema'</span><span class="p">]</span> <span class="o">=</span> <span class="p">{</span>
+ <span class="s1">'fields'</span><span class="p">:</span> <span class="n">schema_fields</span>
+ <span class="p">}</span>
+
+ <span class="k">if</span> <span class="n">schema_update_options</span><span class="p">:</span>
+ <span class="k">if</span> <span class="n">write_disposition</span> <span class="ow">not</span> <span class="ow">in</span> <span class="p">[</span><span class="s2">"WRITE_APPEND"</span><span class="p">,</span> <span class="s2">"WRITE_TRUNCATE"</span><span class="p">]:</span>
+ <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span>
+ <span class="s2">"schema_update_options is only "</span>
+ <span class="s2">"allowed if write_disposition is "</span>
+ <span class="s2">"'WRITE_APPEND' or 'WRITE_TRUNCATE'."</span>
+ <span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span>
+ <span class="s2">"Adding experimental "</span>
+ <span class="s2">"'schemaUpdateOptions': </span><span class="si">{0}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">schema_update_options</span><span class="p">)</span>
+ <span class="p">)</span>
+ <span class="n">configuration</span><span class="p">[</span><span class="s1">'load'</span><span class="p">][</span><span class="s1">'schemaUpdateOptions'</span><span class="p">]</span> <span class="o">=</span> <span class="n">schema_update_options</span>
+
+ <span class="k">if</span> <span class="n">max_bad_records</span><span class="p">:</span>
+ <span class="n">configuration</span><span class="p">[</span><span class="s1">'load'</span><span class="p">][</span><span class="s1">'maxBadRecords'</span><span class="p">]</span> <span class="o">=</span> <span class="n">max_bad_records</span>
+
+ <span class="c1"># if following fields are not specified in src_fmt_configs,</span>
+ <span class="c1"># honor the top-level params for backward-compatibility</span>
+ <span class="k">if</span> <span class="s1">'skipLeadingRows'</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">src_fmt_configs</span><span class="p">:</span>
+ <span class="n">src_fmt_configs</span><span class="p">[</span><span class="s1">'skipLeadingRows'</span><span class="p">]</span> <span class="o">=</span> <span class="n">skip_leading_rows</span>
+ <span class="k">if</span> <span class="s1">'fieldDelimiter'</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">src_fmt_configs</span><span class="p">:</span>
+ <span class="n">src_fmt_configs</span><span class="p">[</span><span class="s1">'fieldDelimiter'</span><span class="p">]</span> <span class="o">=</span> <span class="n">field_delimiter</span>
+ <span class="k">if</span> <span class="n">quote_character</span><span class="p">:</span>
+ <span class="n">src_fmt_configs</span><span class="p">[</span><span class="s1">'quote'</span><span class="p">]</span> <span class="o">=</span> <span class="n">quote_character</span>
+ <span class="k">if</span> <span class="n">allow_quoted_newlines</span><span class="p">:</span>
+ <span class="n">src_fmt_configs</span><span class="p">[</span><span class="s1">'allowQuotedNewlines'</span><span class="p">]</span> <span class="o">=</span> <span class="n">allow_quoted_newlines</span>
+
+ <span class="n">src_fmt_to_configs_mapping</span> <span class="o">=</span> <span class="p">{</span>
+ <span class="s1">'CSV'</span><span class="p">:</span> <span class="p">[</span><span class="s1">'allowJaggedRows'</span><span class="p">,</span> <span class="s1">'allowQuotedNewlines'</span><span class="p">,</span> <span class="s1">'autodetect'</span><span class="p">,</span>
+ <span class="s1">'fieldDelimiter'</span><span class="p">,</span> <span class="s1">'skipLeadingRows'</span><span class="p">,</span> <span class="s1">'ignoreUnknownValues'</span><span class="p">,</span>
+ <span class="s1">'nullMarker'</span><span class="p">,</span> <span class="s1">'quote'</span><span class="p">],</span>
+ <span class="s1">'DATASTORE_BACKUP'</span><span class="p">:</span> <span class="p">[</span><span class="s1">'projectionFields'</span><span class="p">],</span>
+ <span class="s1">'NEWLINE_DELIMITED_JSON'</span><span class="p">:</span> <span class="p">[</span><span class="s1">'autodetect'</span><span class="p">,</span> <span class="s1">'ignoreUnknownValues'</span><span class="p">],</span>
+ <span class="s1">'AVRO'</span><span class="p">:</span> <span class="p">[],</span>
+ <span class="p">}</span>
+ <span class="n">valid_configs</span> <span class="o">=</span> <span class="n">src_fmt_to_configs_mapping</span><span class="p">[</span><span class="n">source_format</span><span class="p">]</span>
+ <span class="n">src_fmt_configs</span> <span class="o">=</span> <span class="p">{</span><span class="n">k</span><span class="p">:</span> <span class="n">v</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">src_fmt_configs</span><span class="o">.</span><span class="n">items</span><span class="p">()</span>
+ <span class="k">if</span> <span class="n">k</span> <span class="ow">in</span> <span class="n">valid_configs</span><span class="p">}</span>
+ <span class="n">configuration</span><span class="p">[</span><span class="s1">'load'</span><span class="p">]</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">src_fmt_configs</span><span class="p">)</span>
- <span class="k">if</span> <span class="n">source_format</span> <span class="o">==</span> <span class="s1">'CSV'</span><span class="p">:</span>
- <span class="n">configuration</span><span class="p">[</span><span class="s1">'load'</span><span class="p">][</span><span class="s1">'skipLeadingRows'</span><span class="p">]</span> <span class="o">=</span> <span class="n">skip_leading_rows</span>
- <span class="n">configuration</span><span class="p">[</span><span class="s1">'load'</span><span class="p">][</span><span class="s1">'fieldDelimiter'</span><span class="p">]</span> <span class="o">=</span> <span class="n">field_delimiter</span>
+ <span class="k">if</span> <span class="n">allow_jagged_rows</span><span class="p">:</span>
+ <span class="n">configuration</span><span class="p">[</span><span class="s1">'load'</span><span class="p">][</span><span class="s1">'allowJaggedRows'</span><span class="p">]</span> <span class="o">=</span> <span class="n">allow_jagged_rows</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">run_with_configuration</span><span class="p">(</span><span class="n">configuration</span><span class="p">)</span>
- <span class="k">def</span> <span class="nf">_split_project_dataset_table_input</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">var_name</span><span class="p">,</span> <span class="n">project_dataset_table</span><span class="p">):</span>
- <span class="sd">"""</span>
-<span class="sd"> :param var_name: the name of the variable input, for logging and erroring purposes.</span>
-<span class="sd"> :type var_name: str</span>
-<span class="sd"> :param project_dataset_table: input string in (<project>.)<dataset>.<project> format.</span>
-<span class="sd"> if project is not included in the string, self.project_id will be returned in the tuple.</span>
-<span class="sd"> :type project_dataset_table: str</span>
-<span class="sd"> :return: (project, dataset, table) tuple</span>
-<span class="sd"> """</span>
- <span class="n">table_split</span> <span class="o">=</span> <span class="n">project_dataset_table</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">)</span>
- <span class="k">assert</span> <span class="nb">len</span><span class="p">(</span><span class="n">table_split</span><span class="p">)</span> <span class="o">==</span> <span class="mi">2</span> <span class="ow">or</span> <span class="nb">len</span><span class="p">(</span><span class="n">table_split</span><span class="p">)</span> <span class="o">==</span> <span class="mi">3</span><span class="p">,</span> <span class="p">(</span>
- <span class="s1">'Expected {var} in the format of (<project.)<dataset>.<table>, '</span>
- <span class="s1">'got {input}'</span><span class="p">)</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">var</span><span class="o">=</span><span class="n">var_name</span><span class="p">,</span> <span class="nb">input</span><span class="o">=</span><span class="n">project_dataset_table</span><span class="p">)</span>
-
- <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">table_split</span><span class="p">)</span> <span class="o">==</span> <span class="mi">2</span><span class="p">:</span>
- <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'project not included in {var}: {input}; using project "{project}"'</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">var</span><span class="o">=</span><span class="n">var_name</span><span class="p">,</span> <span class="nb">input</span><span class="o">=</span><span class="n">project_dataset_table</span><span class="p">,</span> <span class="n">project</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">))</span>
- <span class="n">dataset</span><span class="p">,</span> <span class="n">table</span> <span class="o">=</span> <span class="n">table_split</span>
- <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span> <span class="n">dataset</span><span class="p">,</span> <span class="n">table</span>
- <span class="k">else</span><span class="p">:</span>
- <span class="n">project</span><span class="p">,</span> <span class="n">dataset</span><span class="p">,</span> <span class="n">table</span> <span class="o">=</span> <span class="n">table_split</span>
- <span class="k">return</span> <span class="n">project</span><span class="p">,</span> <span class="n">dataset</span><span class="p">,</span> <span class="n">table</span>
-
<span class="k">def</span> <span class="nf">run_with_configuration</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">configuration</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Executes a BigQuery SQL query. See here:</span>
@@ -581,18 +729,32 @@
<span class="o">.</span><span class="n">insert</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span> <span class="n">body</span><span class="o">=</span><span class="n">job_data</span><span class="p">)</span> \
<span class="o">.</span><span class="n">execute</span><span class="p">()</span>
<span class="n">job_id</span> <span class="o">=</span> <span class="n">query_reply</span><span class="p">[</span><span class="s1">'jobReference'</span><span class="p">][</span><span class="s1">'jobId'</span><span class="p">]</span>
- <span class="n">job</span> <span class="o">=</span> <span class="n">jobs</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span> <span class="n">jobId</span><span class="o">=</span><span class="n">job_id</span><span class="p">)</span><span class="o">.</span><span class="n">execute</span><span class="p">()</span>
<span class="c1"># Wait for query to finish.</span>
- <span class="k">while</span> <span class="ow">not</span> <span class="n">job</span><span class="p">[</span><span class="s1">'status'</span><span class="p">][</span><span class="s1">'state'</span><span class="p">]</span> <span class="o">==</span> <span class="s1">'DONE'</span><span class="p">:</span>
- <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'Waiting for job to complete: </span><span class="si">%s</span><span class="s1">, </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span> <span class="n">job_id</span><span class="p">)</span>
- <span class="n">time</span><span class="o">.</span><span class="n">sleep</span><span class="p">(</span><span class="mi">5</span><span class="p">)</span>
- <span class="n">job</span> <span class="o">=</span> <span class="n">jobs</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span> <span class="n">jobId</span><span class="o">=</span><span class="n">job_id</span><span class="p">)</span><span class="o">.</span><span class="n">execute</span><span class="p">()</span>
-
- <span class="c1"># Check if job had errors.</span>
- <span class="k">if</span> <span class="s1">'errorResult'</span> <span class="ow">in</span> <span class="n">job</span><span class="p">[</span><span class="s1">'status'</span><span class="p">]:</span>
- <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span>
- <span class="s1">'BigQuery job failed. Final error was: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">job</span><span class="p">[</span><span class="s1">'status'</span><span class="p">][</span><span class="s1">'errorResult'</span><span class="p">])</span>
+ <span class="n">keep_polling_job</span> <span class="o">=</span> <span class="kc">True</span>
+ <span class="k">while</span> <span class="p">(</span><span class="n">keep_polling_job</span><span class="p">):</span>
+ <span class="k">try</span><span class="p">:</span>
+ <span class="n">job</span> <span class="o">=</span> <span class="n">jobs</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span> <span class="n">jobId</span><span class="o">=</span><span class="n">job_id</span><span class="p">)</span><span class="o">.</span><span class="n">execute</span><span class="p">()</span>
+ <span class="k">if</span> <span class="p">(</span><span class="n">job</span><span class="p">[</span><span class="s1">'status'</span><span class="p">][</span><span class="s1">'state'</span><span class="p">]</span> <span class="o">==</span> <span class="s1">'DONE'</span><span class="p">):</span>
+ <span class="n">keep_polling_job</span> <span class="o">=</span> <span class="kc">False</span>
+ <span class="c1"># Check if job had errors.</span>
+ <span class="k">if</span> <span class="s1">'errorResult'</span> <span class="ow">in</span> <span class="n">job</span><span class="p">[</span><span class="s1">'status'</span><span class="p">]:</span>
+ <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span>
+ <span class="s1">'BigQuery job failed. Final error was: </span><span class="si">{}</span><span class="s1">. The job was: </span><span class="si">{}</span><span class="s1">'</span><span class="o">.</span><span class="n">format</span><span class="p">(</span>
+ <span class="n">job</span><span class="p">[</span><span class="s1">'status'</span><span class="p">][</span><span class="s1">'errorResult'</span><span class="p">],</span> <span class="n">job</span>
+ <span class="p">)</span>
+ <span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'Waiting for job to complete : </span><span class="si">%s</span><span class="s1">, </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">,</span> <span class="n">job_id</span><span class="p">)</span>
+ <span class="n">time</span><span class="o">.</span><span class="n">sleep</span><span class="p">(</span><span class="mi">5</span><span class="p">)</span>
+
+ <span class="k">except</span> <span class="n">HttpError</span> <span class="k">as</span> <span class="n">err</span><span class="p">:</span>
+ <span class="k">if</span> <span class="n">err</span><span class="o">.</span><span class="n">resp</span><span class="o">.</span><span class="n">status</span> <span class="ow">in</span> <span class="p">[</span><span class="mi">500</span><span class="p">,</span> <span class="mi">503</span><span class="p">]:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'</span><span class="si">%s</span><span class="s1">: Retryable error, waiting for job to complete: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">err</span><span class="o">.</span><span class="n">resp</span><span class="o">.</span><span class="n">status</span><span class="p">,</span> <span class="n">job_id</span><span class="p">)</span>
+ <span class="n">time</span><span class="o">.</span><span class="n">sleep</span><span class="p">(</span><span class="mi">5</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span>
+ <span class="s1">'BigQuery job status check failed. Final error was: </span><span class="si">%s</span><span class="s1">'</span><span class="p">,</span> <span class="n">err</span><span class="o">.</span><span class="n">resp</span><span class="o">.</span><span class="n">status</span><span class="p">)</span>
<span class="k">return</span> <span class="n">job_id</span>
@@ -611,7 +773,7 @@
<span class="k">return</span> <span class="n">tables_resource</span><span class="p">[</span><span class="s1">'schema'</span><span class="p">]</span>
<span class="k">def</span> <span class="nf">get_tabledata</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_id</span><span class="p">,</span>
- <span class="n">max_results</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="n">page_token</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="n">start_index</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+ <span class="n">max_results</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">page_token</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">start_index</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Get the data of a given dataset.table.</span>
<span class="sd"> see https://cloud.google.com/bigquery/docs/reference/v2/tabledata/list</span>
@@ -639,15 +801,56 @@
<span class="o">.</span><span class="n">execute</span><span class="p">()</span>
<span class="p">)</span>
- <span class="k">def</span> <span class="nf">run_table_upsert</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_resource</span><span class="p">,</span> <span class="n">project_id</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+ <span class="k">def</span> <span class="nf">run_table_delete</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">deletion_dataset_table</span><span class="p">,</span> <span class="n">ignore_if_missing</span><span class="o">=</span><span class="kc">False</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Delete an existing table from the dataset;</span>
+<span class="sd"> If the table does not exist, return an error unless ignore_if_missing</span>
+<span class="sd"> is set to True.</span>
+
+<span class="sd"> :param deletion_dataset_table: A dotted</span>
+<span class="sd"> (<project>.|<project>:)<dataset>.<table> that indicates which table</span>
+<span class="sd"> will be deleted.</span>
+<span class="sd"> :type deletion_dataset_table: str</span>
+<span class="sd"> :param ignore_if_missing: if True, then return success even if the</span>
+<span class="sd"> requested table does not exist.</span>
+<span class="sd"> :type ignore_if_missing: boolean</span>
+<span class="sd"> :return:</span>
+<span class="sd"> """</span>
+
+ <span class="k">assert</span> <span class="s1">'.'</span> <span class="ow">in</span> <span class="n">deletion_dataset_table</span><span class="p">,</span> <span class="p">(</span>
+ <span class="s1">'Expected deletion_dataset_table in the format of '</span>
+ <span class="s1">'<dataset>.<table>. Got: </span><span class="si">{}</span><span class="s1">'</span><span class="p">)</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">deletion_dataset_table</span><span class="p">)</span>
+ <span class="n">deletion_project</span><span class="p">,</span> <span class="n">deletion_dataset</span><span class="p">,</span> <span class="n">deletion_table</span> <span class="o">=</span> \
+ <span class="n">_split_tablename</span><span class="p">(</span><span class="n">table_input</span><span class="o">=</span><span class="n">deletion_dataset_table</span><span class="p">,</span>
+ <span class="n">default_project_id</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">project_id</span><span class="p">)</span>
+
+ <span class="k">try</span><span class="p">:</span>
+ <span class="n">tables_resource</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">service</span><span class="o">.</span><span class="n">tables</span><span class="p">()</span> \
+ <span class="o">.</span><span class="n">delete</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="n">deletion_project</span><span class="p">,</span>
+ <span class="n">datasetId</span><span class="o">=</span><span class="n">deletion_dataset</span><span class="p">,</span>
+ <span class="n">tableId</span><span class="o">=</span><span class="n">deletion_table</span><span class="p">)</span> \
+ <span class="o">.</span><span class="n">execute</span><span class="p">()</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'Deleted table </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1">.'</span><span class="p">,</span>
+ <span class="n">deletion_project</span><span class="p">,</span> <span class="n">deletion_dataset</span><span class="p">,</span> <span class="n">deletion_table</span><span class="p">)</span>
+ <span class="k">except</span> <span class="n">HttpError</span><span class="p">:</span>
+ <span class="k">if</span> <span class="ow">not</span> <span class="n">ignore_if_missing</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="ne">Exception</span><span class="p">(</span>
+ <span class="s1">'Table deletion failed. Table does not exist.'</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'Table does not exist. Skipping.'</span><span class="p">)</span>
+
+
+ <span class="k">def</span> <span class="nf">run_table_upsert</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_resource</span><span class="p">,</span> <span class="n">project_id</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> creates a new, empty table in the dataset;</span>
<span class="sd"> If the table already exists, update the existing table.</span>
<span class="sd"> Since BigQuery does not natively allow table upserts, this is not an</span>
<span class="sd"> atomic operation.</span>
+
<span class="sd"> :param dataset_id: the dataset to upsert the table into.</span>
<span class="sd"> :type dataset_id: str</span>
-<span class="sd"> :param table_resource: a table resource. see https://cloud.google.com/bigquery/docs/reference/v2/tables#resource</span>
+<span class="sd"> :param table_resource: a table resource. see</span>
+<span class="sd"> https://cloud.google.com/bigquery/docs/reference/v2/tables#resource</span>
<span class="sd"> :type table_resource: dict</span>
<span class="sd"> :param project_id: the project to upsert the table into. If None,</span>
<span class="sd"> project will be self.project_id.</span>
@@ -655,15 +858,17 @@
<span class="sd"> """</span>
<span class="c1"># check to see if the table exists</span>
<span class="n">table_id</span> <span class="o">=</span> <span class="n">table_resource</span><span class="p">[</span><span class="s1">'tableReference'</span><span class="p">][</span><span class="s1">'tableId'</span><span class="p">]</span>
- <span class="n">project_id</span> <span class="o">=</span> <span class="n">project_id</span> <span class="k">if</span> <span class="n">project_id</span> <span class="ow">is</span> <span class="ow">not</span> <span class="bp">None</span> <span class="k">else</span> <span class="bp">self</span><span class="o">.</span><span class="n">project_id</span>
+ <span class="n">project_id</span> <span class="o">=</span> <span class="n">project_id</span> <span class="k">if</span> <span class="n">project_id</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span> <span class="k">else</span> <span class="bp">self</span><span class="o">.</span><span class="n">project_id</span>
<span class="n">tables_list_resp</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">service</span><span class="o">.</span><span class="n">tables</span><span class="p">()</span><span class="o">.</span><span class="n">list</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="n">project_id</span><span class="p">,</span>
<span class="n">datasetId</span><span class="o">=</span><span class="n">dataset_id</span><span class="p">)</span><span class="o">.</span><span class="n">execute</span><span class="p">()</span>
- <span class="k">while</span> <span class="bp">True</span><span class="p">:</span>
+ <span class="k">while</span> <span class="kc">True</span><span class="p">:</span>
<span class="k">for</span> <span class="n">table</span> <span class="ow">in</span> <span class="n">tables_list_resp</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'tables'</span><span class="p">,</span> <span class="p">[]):</span>
<span class="k">if</span> <span class="n">table</span><span class="p">[</span><span class="s1">'tableReference'</span><span class="p">][</span><span class="s1">'tableId'</span><span class="p">]</span> <span class="o">==</span> <span class="n">table_id</span><span class="p">:</span>
<span class="c1"># found the table, do update</span>
- <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'table </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> exists, updating.'</span><span class="p">,</span>
- <span class="n">project_id</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_id</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span>
+ <span class="s1">'Table </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> exists, updating.'</span><span class="p">,</span>
+ <span class="n">project_id</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_id</span>
+ <span class="p">)</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">service</span><span class="o">.</span><span class="n">tables</span><span class="p">()</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="n">project_id</span><span class="p">,</span>
<span class="n">datasetId</span><span class="o">=</span><span class="n">dataset_id</span><span class="p">,</span>
<span class="n">tableId</span><span class="o">=</span><span class="n">table_id</span><span class="p">,</span>
@@ -678,8 +883,10 @@
<span class="c1"># If there is no next page, then the table doesn't exist.</span>
<span class="k">else</span><span class="p">:</span>
<span class="c1"># do insert</span>
- <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'table </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> does not exist. creating.'</span><span class="p">,</span>
- <span class="n">project_id</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_id</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span>
+ <span class="s1">'Table </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> does not exist. creating.'</span><span class="p">,</span>
+ <span class="n">project_id</span><span class="p">,</span> <span class="n">dataset_id</span><span class="p">,</span> <span class="n">table_id</span>
+ <span class="p">)</span>
<span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">service</span><span class="o">.</span><span class="n">tables</span><span class="p">()</span><span class="o">.</span><span class="n">insert</span><span class="p">(</span><span class="n">projectId</span><span class="o">=</span><span class="n">project_id</span><span class="p">,</span>
<span class="n">datasetId</span><span class="o">=</span><span class="n">dataset_id</span><span class="p">,</span>
<span class="n">body</span><span class="o">=</span><span class="n">table_resource</span><span class="p">)</span><span class="o">.</span><span class="n">execute</span><span class="p">()</span>
@@ -688,12 +895,13 @@
<span class="n">source_dataset</span><span class="p">,</span>
<span class="n">view_dataset</span><span class="p">,</span>
<span class="n">view_table</span><span class="p">,</span>
- <span class="n">source_project</span> <span class="o">=</span> <span class="bp">None</span><span class="p">,</span>
- <span class="n">view_project</span> <span class="o">=</span> <span class="bp">None</span><span class="p">):</span>
+ <span class="n">source_project</span> <span class="o">=</span> <span class="kc">None</span><span class="p">,</span>
+ <span class="n">view_project</span> <span class="o">=</span> <span class="kc">None</span><span class="p">):</span>
<span class="sd">"""</span>
<span class="sd"> Grant authorized view access of a dataset to a view table.</span>
<span class="sd"> If this view has already been granted access to the dataset, do nothing.</span>
<span class="sd"> This method is not atomic. Running it may clobber a simultaneous update.</span>
+
<span class="sd"> :param source_dataset: the source dataset</span>
<span class="sd"> :type source_dataset: str</span>
<span class="sd"> :param view_dataset: the dataset that the view is in</span>
@@ -723,18 +931,20 @@
<span class="s1">'tableId'</span><span class="p">:</span> <span class="n">view_table</span><span class="p">}}</span>
<span class="c1"># check to see if the view we want to add already exists.</span>
<span class="k">if</span> <span class="n">view_access</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">access</span><span class="p">:</span>
- <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s1">'granting table </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> authorized view access to </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1"> dataset.'</span><span class="p">,</span>
- <span class="n">view_project</span><span class="p">,</span> <span class="n">view_dataset</span><span class="p">,</span> <span class="n">view_table</span><span class="p">,</span>
- <span class="n">source_project</span><span class="p">,</span> <span class="n">source_dataset</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span>
+ <span class="s1">'Granting table </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1">.</span><span class="si">%s</span><span class="s1"> authorized view access to </span><span class="si">%s</span><span class="s1">:</span><span class="si">%s</span><span class="s1"> dataset.'</span><span class="p">,</span>
+ <span class="n">view_proje
<TRUNCATED>