You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by ka...@apache.org on 2018/08/29 20:24:58 UTC
[30/48] incubator-airflow-site git commit: 1.10.0 with Updated Api
Reference
http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_modules/airflow/hooks/hive_hooks.html
----------------------------------------------------------------------
diff --git a/_modules/airflow/hooks/hive_hooks.html b/_modules/airflow/hooks/hive_hooks.html
new file mode 100644
index 0000000..ff22edf
--- /dev/null
+++ b/_modules/airflow/hooks/hive_hooks.html
@@ -0,0 +1,1098 @@
+
+
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+ <meta charset="utf-8">
+
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+ <title>airflow.hooks.hive_hooks — Airflow Documentation</title>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <link rel="stylesheet" href="../../../_static/css/theme.css" type="text/css" />
+ <link rel="stylesheet" href="../../../_static/pygments.css" type="text/css" />
+ <link rel="index" title="Index" href="../../../genindex.html" />
+ <link rel="search" title="Search" href="../../../search.html" />
+
+
+ <script src="../../../_static/js/modernizr.min.js"></script>
+
+</head>
+
+<body class="wy-body-for-nav">
+
+
+ <div class="wy-grid-for-nav">
+
+
+ <nav data-toggle="wy-nav-shift" class="wy-nav-side">
+ <div class="wy-side-scroll">
+ <div class="wy-side-nav-search">
+
+
+
+ <a href="../../../index.html" class="icon icon-home"> Airflow
+
+
+
+ </a>
+
+
+
+
+
+
+
+<div role="search">
+ <form id="rtd-search-form" class="wy-form" action="../../../search.html" method="get">
+ <input type="text" name="q" placeholder="Search docs" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+</div>
+
+
+ </div>
+
+ <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+
+
+
+
+
+
+ <ul>
+<li class="toctree-l1"><a class="reference internal" href="../../../project.html">Project</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../license.html">License</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../start.html">Quick Start</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../installation.html">Installation</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../tutorial.html">Tutorial</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../howto/index.html">How-to Guides</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../ui.html">UI / Screenshots</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../concepts.html">Concepts</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../profiling.html">Data Profiling</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../cli.html">Command Line Interface</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../scheduler.html">Scheduling & Triggers</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../plugins.html">Plugins</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../security.html">Security</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../timezone.html">Time zones</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../api.html">Experimental Rest API</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../integration.html">Integration</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../lineage.html">Lineage</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../faq.html">FAQ</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../../../code.html">API Reference</a></li>
+</ul>
+
+
+
+ </div>
+ </div>
+ </nav>
+
+ <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+
+ <nav class="wy-nav-top" aria-label="top navigation">
+
+ <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+ <a href="../../../index.html">Airflow</a>
+
+ </nav>
+
+
+ <div class="wy-nav-content">
+
+ <div class="rst-content">
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+<div role="navigation" aria-label="breadcrumbs navigation">
+
+ <ul class="wy-breadcrumbs">
+
+ <li><a href="../../../index.html">Docs</a> »</li>
+
+ <li><a href="../../index.html">Module code</a> »</li>
+
+ <li>airflow.hooks.hive_hooks</li>
+
+
+ <li class="wy-breadcrumbs-aside">
+
+ </li>
+
+ </ul>
+
+
+ <hr/>
+</div>
+ <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
+ <div itemprop="articleBody">
+
+ <h1>Source code for airflow.hooks.hive_hooks</h1><div class="highlight"><pre>
+<span></span><span class="c1"># -*- coding: utf-8 -*-</span>
+<span class="c1">#</span>
+<span class="c1"># Licensed to the Apache Software Foundation (ASF) under one</span>
+<span class="c1"># or more contributor license agreements. See the NOTICE file</span>
+<span class="c1"># distributed with this work for additional information</span>
+<span class="c1"># regarding copyright ownership. The ASF licenses this file</span>
+<span class="c1"># to you under the Apache License, Version 2.0 (the</span>
+<span class="c1"># "License"); you may not use this file except in compliance</span>
+<span class="c1"># with the License. You may obtain a copy of the License at</span>
+<span class="c1">#</span>
+<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
+<span class="c1">#</span>
+<span class="c1"># Unless required by applicable law or agreed to in writing,</span>
+<span class="c1"># software distributed under the License is distributed on an</span>
+<span class="c1"># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY</span>
+<span class="c1"># KIND, either express or implied. See the License for the</span>
+<span class="c1"># specific language governing permissions and limitations</span>
+<span class="c1"># under the License.</span>
+
+<span class="kn">from</span> <span class="nn">__future__</span> <span class="k">import</span> <span class="n">print_function</span><span class="p">,</span> <span class="n">unicode_literals</span>
+
+<span class="kn">import</span> <span class="nn">contextlib</span>
+<span class="kn">import</span> <span class="nn">os</span>
+
+<span class="kn">from</span> <span class="nn">six.moves</span> <span class="k">import</span> <span class="nb">zip</span>
+<span class="kn">from</span> <span class="nn">past.builtins</span> <span class="k">import</span> <span class="n">basestring</span><span class="p">,</span> <span class="n">unicode</span>
+
+<span class="kn">import</span> <span class="nn">unicodecsv</span> <span class="k">as</span> <span class="nn">csv</span>
+<span class="kn">import</span> <span class="nn">re</span>
+<span class="kn">import</span> <span class="nn">six</span>
+<span class="kn">import</span> <span class="nn">subprocess</span>
+<span class="kn">import</span> <span class="nn">time</span>
+<span class="kn">from</span> <span class="nn">collections</span> <span class="k">import</span> <span class="n">OrderedDict</span>
+<span class="kn">from</span> <span class="nn">tempfile</span> <span class="k">import</span> <span class="n">NamedTemporaryFile</span>
+<span class="kn">import</span> <span class="nn">hmsclient</span>
+
+<span class="kn">from</span> <span class="nn">airflow</span> <span class="k">import</span> <span class="n">configuration</span> <span class="k">as</span> <span class="n">conf</span>
+<span class="kn">from</span> <span class="nn">airflow.exceptions</span> <span class="k">import</span> <span class="n">AirflowException</span>
+<span class="kn">from</span> <span class="nn">airflow.hooks.base_hook</span> <span class="k">import</span> <span class="n">BaseHook</span>
+<span class="kn">from</span> <span class="nn">airflow.utils.helpers</span> <span class="k">import</span> <span class="n">as_flattened_list</span>
+<span class="kn">from</span> <span class="nn">airflow.utils.file</span> <span class="k">import</span> <span class="n">TemporaryDirectory</span>
+<span class="kn">from</span> <span class="nn">airflow</span> <span class="k">import</span> <span class="n">configuration</span>
+<span class="kn">import</span> <span class="nn">airflow.security.utils</span> <span class="k">as</span> <span class="nn">utils</span>
+
+<span class="n">HIVE_QUEUE_PRIORITIES</span> <span class="o">=</span> <span class="p">[</span><span class="s1">'VERY_HIGH'</span><span class="p">,</span> <span class="s1">'HIGH'</span><span class="p">,</span> <span class="s1">'NORMAL'</span><span class="p">,</span> <span class="s1">'LOW'</span><span class="p">,</span> <span class="s1">'VERY_LOW'</span><span class="p">]</span>
+
+
+<div class="viewcode-block" id="HiveCliHook"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveCliHook">[docs]</a><span class="k">class</span> <span class="nc">HiveCliHook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span>
+ <span class="sd">"""Simple wrapper around the hive CLI.</span>
+
+<span class="sd"> It also supports the ``beeline``</span>
+<span class="sd"> a lighter CLI that runs JDBC and is replacing the heavier</span>
+<span class="sd"> traditional CLI. To enable ``beeline``, set the use_beeline param in the</span>
+<span class="sd"> extra field of your connection as in ``{ "use_beeline": true }``</span>
+
+<span class="sd"> Note that you can also set default hive CLI parameters using the</span>
+<span class="sd"> ``hive_cli_params`` to be used in your connection as in</span>
+<span class="sd"> ``{"hive_cli_params": "-hiveconf mapred.job.tracker=some.jobtracker:444"}``</span>
+<span class="sd"> Parameters passed here can be overridden by run_cli's hive_conf param</span>
+
+<span class="sd"> The extra connection parameter ``auth`` gets passed as in the ``jdbc``</span>
+<span class="sd"> connection string as is.</span>
+
+<span class="sd"> :param mapred_queue: queue used by the Hadoop Scheduler (Capacity or Fair)</span>
+<span class="sd"> :type mapred_queue: string</span>
+<span class="sd"> :param mapred_queue_priority: priority within the job queue.</span>
+<span class="sd"> Possible settings include: VERY_HIGH, HIGH, NORMAL, LOW, VERY_LOW</span>
+<span class="sd"> :type mapred_queue_priority: string</span>
+<span class="sd"> :param mapred_job_name: This name will appear in the jobtracker.</span>
+<span class="sd"> This can make monitoring easier.</span>
+<span class="sd"> :type mapred_job_name: string</span>
+<span class="sd"> """</span>
+
+ <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span>
+ <span class="n">hive_cli_conn_id</span><span class="o">=</span><span class="s2">"hive_cli_default"</span><span class="p">,</span>
+ <span class="n">run_as</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">mapred_queue</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">mapred_queue_priority</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">mapred_job_name</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
+ <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="n">hive_cli_conn_id</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_params</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'hive_cli_params'</span><span class="p">,</span> <span class="s1">''</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">use_beeline</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'use_beeline'</span><span class="p">,</span> <span class="kc">False</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">auth</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'auth'</span><span class="p">,</span> <span class="s1">'noSasl'</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">conn</span> <span class="o">=</span> <span class="n">conn</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_as</span> <span class="o">=</span> <span class="n">run_as</span>
+
+ <span class="k">if</span> <span class="n">mapred_queue_priority</span><span class="p">:</span>
+ <span class="n">mapred_queue_priority</span> <span class="o">=</span> <span class="n">mapred_queue_priority</span><span class="o">.</span><span class="n">upper</span><span class="p">()</span>
+ <span class="k">if</span> <span class="n">mapred_queue_priority</span> <span class="ow">not</span> <span class="ow">in</span> <span class="n">HIVE_QUEUE_PRIORITIES</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
+ <span class="s2">"Invalid Mapred Queue Priority. Valid values are: "</span>
+ <span class="s2">"</span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="s1">', '</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">HIVE_QUEUE_PRIORITIES</span><span class="p">)))</span>
+
+ <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span> <span class="o">=</span> <span class="n">mapred_queue</span> <span class="ow">or</span> <span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'hive'</span><span class="p">,</span>
+ <span class="s1">'default_hive_mapred_queue'</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue_priority</span> <span class="o">=</span> <span class="n">mapred_queue_priority</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">mapred_job_name</span> <span class="o">=</span> <span class="n">mapred_job_name</span>
+
+ <span class="k">def</span> <span class="nf">_prepare_cli_cmd</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> This function creates the command list from available information</span>
+<span class="sd"> """</span>
+ <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conn</span>
+ <span class="n">hive_bin</span> <span class="o">=</span> <span class="s1">'hive'</span>
+ <span class="n">cmd_extra</span> <span class="o">=</span> <span class="p">[]</span>
+
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">use_beeline</span><span class="p">:</span>
+ <span class="n">hive_bin</span> <span class="o">=</span> <span class="s1">'beeline'</span>
+ <span class="n">jdbc_url</span> <span class="o">=</span> <span class="s2">"jdbc:hive2://</span><span class="si">{conn.host}</span><span class="s2">:</span><span class="si">{conn.port}</span><span class="s2">/</span><span class="si">{conn.schema}</span><span class="s2">"</span>
+ <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span>
+ <span class="n">template</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span>
+ <span class="s1">'principal'</span><span class="p">,</span> <span class="s2">"hive/_HOST@EXAMPLE.COM"</span><span class="p">)</span>
+ <span class="k">if</span> <span class="s2">"_HOST"</span> <span class="ow">in</span> <span class="n">template</span><span class="p">:</span>
+ <span class="n">template</span> <span class="o">=</span> <span class="n">utils</span><span class="o">.</span><span class="n">replace_hostname_pattern</span><span class="p">(</span>
+ <span class="n">utils</span><span class="o">.</span><span class="n">get_components</span><span class="p">(</span><span class="n">template</span><span class="p">))</span>
+
+ <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">""</span> <span class="c1"># noqa</span>
+ <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'proxy_user'</span><span class="p">)</span> <span class="o">==</span> <span class="s2">"login"</span> <span class="ow">and</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">:</span>
+ <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">"hive.server2.proxy.user=</span><span class="si">{0}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">)</span>
+ <span class="k">elif</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'proxy_user'</span><span class="p">)</span> <span class="o">==</span> <span class="s2">"owner"</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">run_as</span><span class="p">:</span>
+ <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">"hive.server2.proxy.user=</span><span class="si">{0}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">run_as</span><span class="p">)</span>
+
+ <span class="n">jdbc_url</span> <span class="o">+=</span> <span class="s2">";principal=</span><span class="si">{template}</span><span class="s2">;</span><span class="si">{proxy_user}</span><span class="s2">"</span>
+ <span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">auth</span><span class="p">:</span>
+ <span class="n">jdbc_url</span> <span class="o">+=</span> <span class="s2">";auth="</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">auth</span>
+
+ <span class="n">jdbc_url</span> <span class="o">=</span> <span class="n">jdbc_url</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+
+ <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-u'</span><span class="p">,</span> <span class="n">jdbc_url</span><span class="p">]</span>
+ <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">:</span>
+ <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-n'</span><span class="p">,</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">]</span>
+ <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">password</span><span class="p">:</span>
+ <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-p'</span><span class="p">,</span> <span class="n">conn</span><span class="o">.</span><span class="n">password</span><span class="p">]</span>
+
+ <span class="n">hive_params_list</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_params</span><span class="o">.</span><span class="n">split</span><span class="p">()</span>
+
+ <span class="k">return</span> <span class="p">[</span><span class="n">hive_bin</span><span class="p">]</span> <span class="o">+</span> <span class="n">cmd_extra</span> <span class="o">+</span> <span class="n">hive_params_list</span>
+
+ <span class="k">def</span> <span class="nf">_prepare_hiveconf</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">d</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> This function prepares a list of hiveconf params</span>
+<span class="sd"> from a dictionary of key value pairs.</span>
+
+<span class="sd"> :param d:</span>
+<span class="sd"> :type d: dict</span>
+
+<span class="sd"> >>> hh = HiveCliHook()</span>
+<span class="sd"> >>> hive_conf = {"hive.exec.dynamic.partition": "true",</span>
+<span class="sd"> ... "hive.exec.dynamic.partition.mode": "nonstrict"}</span>
+<span class="sd"> >>> hh._prepare_hiveconf(hive_conf)</span>
+<span class="sd"> ["-hiveconf", "hive.exec.dynamic.partition=true",\</span>
+<span class="sd"> "-hiveconf", "hive.exec.dynamic.partition.mode=nonstrict"]</span>
+<span class="sd"> """</span>
+ <span class="k">if</span> <span class="ow">not</span> <span class="n">d</span><span class="p">:</span>
+ <span class="k">return</span> <span class="p">[]</span>
+ <span class="k">return</span> <span class="n">as_flattened_list</span><span class="p">(</span>
+ <span class="nb">zip</span><span class="p">([</span><span class="s2">"-hiveconf"</span><span class="p">]</span> <span class="o">*</span> <span class="nb">len</span><span class="p">(</span><span class="n">d</span><span class="p">),</span>
+ <span class="p">[</span><span class="s2">"</span><span class="si">{}</span><span class="s2">=</span><span class="si">{}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">d</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span>
+ <span class="p">)</span>
+
+<div class="viewcode-block" id="HiveCliHook.run_cli"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveCliHook.run_cli">[docs]</a> <span class="k">def</span> <span class="nf">run_cli</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span> <span class="n">hive_conf</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Run an hql statement using the hive cli. If hive_conf is specified</span>
+<span class="sd"> it should be a dict and the entries will be set as key/value pairs</span>
+<span class="sd"> in HiveConf</span>
+
+
+<span class="sd"> :param hive_conf: if specified these key value pairs will be passed</span>
+<span class="sd"> to hive as ``-hiveconf "key"="value"``. Note that they will be</span>
+<span class="sd"> passed after the ``hive_cli_params`` and thus will override</span>
+<span class="sd"> whatever values are specified in the database.</span>
+<span class="sd"> :type hive_conf: dict</span>
+
+<span class="sd"> >>> hh = HiveCliHook()</span>
+<span class="sd"> >>> result = hh.run_cli("USE airflow;")</span>
+<span class="sd"> >>> ("OK" in result)</span>
+<span class="sd"> True</span>
+<span class="sd"> """</span>
+ <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conn</span>
+ <span class="n">schema</span> <span class="o">=</span> <span class="n">schema</span> <span class="ow">or</span> <span class="n">conn</span><span class="o">.</span><span class="n">schema</span>
+ <span class="k">if</span> <span class="n">schema</span><span class="p">:</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="s2">"USE </span><span class="si">{schema}</span><span class="s2">;</span><span class="se">\n</span><span class="si">{hql}</span><span class="s2">"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+
+ <span class="k">with</span> <span class="n">TemporaryDirectory</span><span class="p">(</span><span class="n">prefix</span><span class="o">=</span><span class="s1">'airflow_hiveop_'</span><span class="p">)</span> <span class="k">as</span> <span class="n">tmp_dir</span><span class="p">:</span>
+ <span class="k">with</span> <span class="n">NamedTemporaryFile</span><span class="p">(</span><span class="nb">dir</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span>
+ <span class="n">f</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">hql</span><span class="o">.</span><span class="n">encode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">))</span>
+ <span class="n">f</span><span class="o">.</span><span class="n">flush</span><span class="p">()</span>
+ <span class="n">hive_cmd</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_cli_cmd</span><span class="p">()</span>
+ <span class="n">hive_conf_params</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">_prepare_hiveconf</span><span class="p">(</span><span class="n">hive_conf</span><span class="p">)</span>
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">:</span>
+ <span class="n">hive_conf_params</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span>
+ <span class="p">[</span><span class="s1">'-hiveconf'</span><span class="p">,</span>
+ <span class="s1">'mapreduce.job.queuename=</span><span class="si">{}</span><span class="s1">'</span>
+ <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">),</span>
+ <span class="s1">'-hiveconf'</span><span class="p">,</span>
+ <span class="s1">'mapred.job.queue.name=</span><span class="si">{}</span><span class="s1">'</span>
+ <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">),</span>
+ <span class="s1">'-hiveconf'</span><span class="p">,</span>
+ <span class="s1">'tez.job.queue.name=</span><span class="si">{}</span><span class="s1">'</span>
+ <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue</span><span class="p">)</span>
+ <span class="p">])</span>
+
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue_priority</span><span class="p">:</span>
+ <span class="n">hive_conf_params</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span>
+ <span class="p">[</span><span class="s1">'-hiveconf'</span><span class="p">,</span>
+ <span class="s1">'mapreduce.job.priority=</span><span class="si">{}</span><span class="s1">'</span>
+ <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_queue_priority</span><span class="p">)])</span>
+
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">mapred_job_name</span><span class="p">:</span>
+ <span class="n">hive_conf_params</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span>
+ <span class="p">[</span><span class="s1">'-hiveconf'</span><span class="p">,</span>
+ <span class="s1">'mapred.job.name=</span><span class="si">{}</span><span class="s1">'</span>
+ <span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">mapred_job_name</span><span class="p">)])</span>
+
+ <span class="n">hive_cmd</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">hive_conf_params</span><span class="p">)</span>
+ <span class="n">hive_cmd</span><span class="o">.</span><span class="n">extend</span><span class="p">([</span><span class="s1">'-f'</span><span class="p">,</span> <span class="n">f</span><span class="o">.</span><span class="n">name</span><span class="p">])</span>
+
+ <span class="k">if</span> <span class="n">verbose</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">" "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">hive_cmd</span><span class="p">))</span>
+ <span class="n">sp</span> <span class="o">=</span> <span class="n">subprocess</span><span class="o">.</span><span class="n">Popen</span><span class="p">(</span>
+ <span class="n">hive_cmd</span><span class="p">,</span>
+ <span class="n">stdout</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">,</span>
+ <span class="n">stderr</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">STDOUT</span><span class="p">,</span>
+ <span class="n">cwd</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">,</span>
+ <span class="n">close_fds</span><span class="o">=</span><span class="kc">True</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">sp</span> <span class="o">=</span> <span class="n">sp</span>
+ <span class="n">stdout</span> <span class="o">=</span> <span class="s1">''</span>
+ <span class="k">while</span> <span class="kc">True</span><span class="p">:</span>
+ <span class="n">line</span> <span class="o">=</span> <span class="n">sp</span><span class="o">.</span><span class="n">stdout</span><span class="o">.</span><span class="n">readline</span><span class="p">()</span>
+ <span class="k">if</span> <span class="ow">not</span> <span class="n">line</span><span class="p">:</span>
+ <span class="k">break</span>
+ <span class="n">stdout</span> <span class="o">+=</span> <span class="n">line</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">verbose</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">line</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">)</span><span class="o">.</span><span class="n">strip</span><span class="p">())</span>
+ <span class="n">sp</span><span class="o">.</span><span class="n">wait</span><span class="p">()</span>
+
+ <span class="k">if</span> <span class="n">sp</span><span class="o">.</span><span class="n">returncode</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="n">stdout</span><span class="p">)</span>
+
+ <span class="k">return</span> <span class="n">stdout</span></div>
+
+<div class="viewcode-block" id="HiveCliHook.test_hql"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveCliHook.test_hql">[docs]</a> <span class="k">def</span> <span class="nf">test_hql</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Test an hql statement using the hive cli and EXPLAIN</span>
+
+<span class="sd"> """</span>
+ <span class="n">create</span><span class="p">,</span> <span class="n">insert</span><span class="p">,</span> <span class="n">other</span> <span class="o">=</span> <span class="p">[],</span> <span class="p">[],</span> <span class="p">[]</span>
+ <span class="k">for</span> <span class="n">query</span> <span class="ow">in</span> <span class="n">hql</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">';'</span><span class="p">):</span> <span class="c1"># naive</span>
+ <span class="n">query_original</span> <span class="o">=</span> <span class="n">query</span>
+ <span class="n">query</span> <span class="o">=</span> <span class="n">query</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span>
+
+ <span class="k">if</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'create table'</span><span class="p">):</span>
+ <span class="n">create</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
+ <span class="k">elif</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">((</span><span class="s1">'set '</span><span class="p">,</span>
+ <span class="s1">'add jar '</span><span class="p">,</span>
+ <span class="s1">'create temporary function'</span><span class="p">)):</span>
+ <span class="n">other</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
+ <span class="k">elif</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'insert'</span><span class="p">):</span>
+ <span class="n">insert</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
+ <span class="n">other</span> <span class="o">=</span> <span class="s1">';'</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
+ <span class="k">for</span> <span class="n">query_set</span> <span class="ow">in</span> <span class="p">[</span><span class="n">create</span><span class="p">,</span> <span class="n">insert</span><span class="p">]:</span>
+ <span class="k">for</span> <span class="n">query</span> <span class="ow">in</span> <span class="n">query_set</span><span class="p">:</span>
+
+ <span class="n">query_preview</span> <span class="o">=</span> <span class="s1">' '</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">())[:</span><span class="mi">50</span><span class="p">]</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Testing HQL [</span><span class="si">%s</span><span class="s2"> (...)]"</span><span class="p">,</span> <span class="n">query_preview</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">query_set</span> <span class="o">==</span> <span class="n">insert</span><span class="p">:</span>
+ <span class="n">query</span> <span class="o">=</span> <span class="n">other</span> <span class="o">+</span> <span class="s1">'; explain '</span> <span class="o">+</span> <span class="n">query</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="n">query</span> <span class="o">=</span> <span class="s1">'explain '</span> <span class="o">+</span> <span class="n">query</span>
+ <span class="k">try</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">query</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="kc">False</span><span class="p">)</span>
+ <span class="k">except</span> <span class="n">AirflowException</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
+ <span class="n">message</span> <span class="o">=</span> <span class="n">e</span><span class="o">.</span><span class="n">args</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)[</span><span class="o">-</span><span class="mi">2</span><span class="p">]</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">message</span><span class="p">)</span>
+ <span class="n">error_loc</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="s1">'(\d+):(\d+)'</span><span class="p">,</span> <span class="n">message</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">error_loc</span> <span class="ow">and</span> <span class="n">error_loc</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">isdigit</span><span class="p">():</span>
+ <span class="n">lst</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">error_loc</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">))</span>
+ <span class="n">begin</span> <span class="o">=</span> <span class="nb">max</span><span class="p">(</span><span class="n">lst</span> <span class="o">-</span> <span class="mi">2</span><span class="p">,</span> <span class="mi">0</span><span class="p">)</span>
+ <span class="n">end</span> <span class="o">=</span> <span class="nb">min</span><span class="p">(</span><span class="n">lst</span> <span class="o">+</span> <span class="mi">3</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)))</span>
+ <span class="n">context</span> <span class="o">=</span> <span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)[</span><span class="n">begin</span><span class="p">:</span><span class="n">end</span><span class="p">])</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Context :</span><span class="se">\n</span><span class="s2"> </span><span class="si">%s</span><span class="s2">"</span><span class="p">,</span> <span class="n">context</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"SUCCESS"</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="HiveCliHook.load_df"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveCliHook.load_df">[docs]</a> <span class="k">def</span> <span class="nf">load_df</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span>
+ <span class="n">df</span><span class="p">,</span>
+ <span class="n">table</span><span class="p">,</span>
+ <span class="n">field_dict</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">delimiter</span><span class="o">=</span><span class="s1">','</span><span class="p">,</span>
+ <span class="n">encoding</span><span class="o">=</span><span class="s1">'utf8'</span><span class="p">,</span>
+ <span class="n">pandas_kwargs</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Loads a pandas DataFrame into hive.</span>
+
+<span class="sd"> Hive data types will be inferred if not passed but column names will</span>
+<span class="sd"> not be sanitized.</span>
+
+<span class="sd"> :param df: DataFrame to load into a Hive table</span>
+<span class="sd"> :type df: DataFrame</span>
+<span class="sd"> :param table: target Hive table, use dot notation to target a</span>
+<span class="sd"> specific database</span>
+<span class="sd"> :type table: str</span>
+<span class="sd"> :param field_dict: mapping from column name to hive data type.</span>
+<span class="sd"> Note that it must be OrderedDict so as to keep columns' order.</span>
+<span class="sd"> :type field_dict: OrderedDict</span>
+<span class="sd"> :param delimiter: field delimiter in the file</span>
+<span class="sd"> :type delimiter: str</span>
+<span class="sd"> :param encoding: string encoding to use when writing DataFrame to file</span>
+<span class="sd"> :type encoding: str</span>
+<span class="sd"> :param pandas_kwargs: passed to DataFrame.to_csv</span>
+<span class="sd"> :type pandas_kwargs: dict</span>
+<span class="sd"> :param kwargs: passed to self.load_file</span>
+<span class="sd"> """</span>
+
+ <span class="k">def</span> <span class="nf">_infer_field_types_from_df</span><span class="p">(</span><span class="n">df</span><span class="p">):</span>
+ <span class="n">DTYPE_KIND_HIVE_TYPE</span> <span class="o">=</span> <span class="p">{</span>
+ <span class="s1">'b'</span><span class="p">:</span> <span class="s1">'BOOLEAN'</span><span class="p">,</span> <span class="c1"># boolean</span>
+ <span class="s1">'i'</span><span class="p">:</span> <span class="s1">'BIGINT'</span><span class="p">,</span> <span class="c1"># signed integer</span>
+ <span class="s1">'u'</span><span class="p">:</span> <span class="s1">'BIGINT'</span><span class="p">,</span> <span class="c1"># unsigned integer</span>
+ <span class="s1">'f'</span><span class="p">:</span> <span class="s1">'DOUBLE'</span><span class="p">,</span> <span class="c1"># floating-point</span>
+ <span class="s1">'c'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># complex floating-point</span>
+ <span class="s1">'M'</span><span class="p">:</span> <span class="s1">'TIMESTAMP'</span><span class="p">,</span> <span class="c1"># datetime</span>
+ <span class="s1">'O'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># object</span>
+ <span class="s1">'S'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># (byte-)string</span>
+ <span class="s1">'U'</span><span class="p">:</span> <span class="s1">'STRING'</span><span class="p">,</span> <span class="c1"># Unicode</span>
+ <span class="s1">'V'</span><span class="p">:</span> <span class="s1">'STRING'</span> <span class="c1"># void</span>
+ <span class="p">}</span>
+
+ <span class="n">d</span> <span class="o">=</span> <span class="n">OrderedDict</span><span class="p">()</span>
+ <span class="k">for</span> <span class="n">col</span><span class="p">,</span> <span class="n">dtype</span> <span class="ow">in</span> <span class="n">df</span><span class="o">.</span><span class="n">dtypes</span><span class="o">.</span><span class="n">iteritems</span><span class="p">():</span>
+ <span class="n">d</span><span class="p">[</span><span class="n">col</span><span class="p">]</span> <span class="o">=</span> <span class="n">DTYPE_KIND_HIVE_TYPE</span><span class="p">[</span><span class="n">dtype</span><span class="o">.</span><span class="n">kind</span><span class="p">]</span>
+ <span class="k">return</span> <span class="n">d</span>
+
+ <span class="k">if</span> <span class="n">pandas_kwargs</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
+ <span class="n">pandas_kwargs</span> <span class="o">=</span> <span class="p">{}</span>
+
+ <span class="k">with</span> <span class="n">TemporaryDirectory</span><span class="p">(</span><span class="n">prefix</span><span class="o">=</span><span class="s1">'airflow_hiveop_'</span><span class="p">)</span> <span class="k">as</span> <span class="n">tmp_dir</span><span class="p">:</span>
+ <span class="k">with</span> <span class="n">NamedTemporaryFile</span><span class="p">(</span><span class="nb">dir</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">,</span> <span class="n">mode</span><span class="o">=</span><span class="s2">"w"</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span>
+
+ <span class="k">if</span> <span class="n">field_dict</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
+ <span class="n">field_dict</span> <span class="o">=</span> <span class="n">_infer_field_types_from_df</span><span class="p">(</span><span class="n">df</span><span class="p">)</span>
+
+ <span class="n">df</span><span class="o">.</span><span class="n">to_csv</span><span class="p">(</span><span class="n">path_or_buf</span><span class="o">=</span><span class="n">f</span><span class="p">,</span>
+ <span class="n">sep</span><span class="o">=</span><span class="p">(</span><span class="n">delimiter</span><span class="o">.</span><span class="n">encode</span><span class="p">(</span><span class="n">encoding</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">six</span><span class="o">.</span><span class="n">PY2</span> <span class="ow">and</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">delimiter</span><span class="p">,</span> <span class="n">unicode</span><span class="p">)</span>
+ <span class="k">else</span> <span class="n">delimiter</span><span class="p">),</span>
+ <span class="n">header</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
+ <span class="n">index</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
+ <span class="n">encoding</span><span class="o">=</span><span class="n">encoding</span><span class="p">,</span>
+ <span class="n">date_format</span><span class="o">=</span><span class="s2">"%Y-%m-</span><span class="si">%d</span><span class="s2"> %H:%M:%S"</span><span class="p">,</span>
+ <span class="o">**</span><span class="n">pandas_kwargs</span><span class="p">)</span>
+ <span class="n">f</span><span class="o">.</span><span class="n">flush</span><span class="p">()</span>
+
+ <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">load_file</span><span class="p">(</span><span class="n">filepath</span><span class="o">=</span><span class="n">f</span><span class="o">.</span><span class="n">name</span><span class="p">,</span>
+ <span class="n">table</span><span class="o">=</span><span class="n">table</span><span class="p">,</span>
+ <span class="n">delimiter</span><span class="o">=</span><span class="n">delimiter</span><span class="p">,</span>
+ <span class="n">field_dict</span><span class="o">=</span><span class="n">field_dict</span><span class="p">,</span>
+ <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="HiveCliHook.load_file"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveCliHook.load_file">[docs]</a> <span class="k">def</span> <span class="nf">load_file</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span>
+ <span class="n">filepath</span><span class="p">,</span>
+ <span class="n">table</span><span class="p">,</span>
+ <span class="n">delimiter</span><span class="o">=</span><span class="s2">","</span><span class="p">,</span>
+ <span class="n">field_dict</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">create</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
+ <span class="n">overwrite</span><span class="o">=</span><span class="kc">True</span><span class="p">,</span>
+ <span class="n">partition</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
+ <span class="n">recreate</span><span class="o">=</span><span class="kc">False</span><span class="p">,</span>
+ <span class="n">tblproperties</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Loads a local file into Hive</span>
+
+<span class="sd"> Note that the table generated in Hive uses ``STORED AS textfile``</span>
+<span class="sd"> which isn't the most efficient serialization format. If a</span>
+<span class="sd"> large amount of data is loaded and/or if the tables gets</span>
+<span class="sd"> queried considerably, you may want to use this operator only to</span>
+<span class="sd"> stage the data into a temporary table before loading it into its</span>
+<span class="sd"> final destination using a ``HiveOperator``.</span>
+
+<span class="sd"> :param filepath: local filepath of the file to load</span>
+<span class="sd"> :type filepath: str</span>
+<span class="sd"> :param table: target Hive table, use dot notation to target a</span>
+<span class="sd"> specific database</span>
+<span class="sd"> :type table: str</span>
+<span class="sd"> :param delimiter: field delimiter in the file</span>
+<span class="sd"> :type delimiter: str</span>
+<span class="sd"> :param field_dict: A dictionary of the fields name in the file</span>
+<span class="sd"> as keys and their Hive types as values.</span>
+<span class="sd"> Note that it must be OrderedDict so as to keep columns' order.</span>
+<span class="sd"> :type field_dict: OrderedDict</span>
+<span class="sd"> :param create: whether to create the table if it doesn't exist</span>
+<span class="sd"> :type create: bool</span>
+<span class="sd"> :param overwrite: whether to overwrite the data in table or partition</span>
+<span class="sd"> :type overwrite: bool</span>
+<span class="sd"> :param partition: target partition as a dict of partition columns</span>
+<span class="sd"> and values</span>
+<span class="sd"> :type partition: dict</span>
+<span class="sd"> :param recreate: whether to drop and recreate the table at every</span>
+<span class="sd"> execution</span>
+<span class="sd"> :type recreate: bool</span>
+<span class="sd"> :param tblproperties: TBLPROPERTIES of the hive table being created</span>
+<span class="sd"> :type tblproperties: dict</span>
+<span class="sd"> """</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="s1">''</span>
+ <span class="k">if</span> <span class="n">recreate</span><span class="p">:</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"DROP TABLE IF EXISTS </span><span class="si">{table}</span><span class="s2">;</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="k">if</span> <span class="n">create</span> <span class="ow">or</span> <span class="n">recreate</span><span class="p">:</span>
+ <span class="k">if</span> <span class="n">field_dict</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="ne">ValueError</span><span class="p">(</span><span class="s2">"Must provide a field dict when creating a table"</span><span class="p">)</span>
+ <span class="n">fields</span> <span class="o">=</span> <span class="s2">",</span><span class="se">\n</span><span class="s2"> "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span>
+ <span class="p">[</span><span class="n">k</span> <span class="o">+</span> <span class="s1">' '</span> <span class="o">+</span> <span class="n">v</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">field_dict</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"CREATE TABLE IF NOT EXISTS </span><span class="si">{table}</span><span class="s2"> (</span><span class="se">\n</span><span class="si">{fields}</span><span class="s2">)</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="k">if</span> <span class="n">partition</span><span class="p">:</span>
+ <span class="n">pfields</span> <span class="o">=</span> <span class="s2">",</span><span class="se">\n</span><span class="s2"> "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span>
+ <span class="p">[</span><span class="n">p</span> <span class="o">+</span> <span class="s2">" STRING"</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">partition</span><span class="p">])</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"PARTITIONED BY (</span><span class="si">{pfields}</span><span class="s2">)</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"ROW FORMAT DELIMITED</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"FIELDS TERMINATED BY '</span><span class="si">{delimiter}</span><span class="s2">'</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"STORED AS textfile</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="k">if</span> <span class="n">tblproperties</span> <span class="ow">is</span> <span class="ow">not</span> <span class="kc">None</span><span class="p">:</span>
+ <span class="n">tprops</span> <span class="o">=</span> <span class="s2">", "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span>
+ <span class="p">[</span><span class="s2">"'</span><span class="si">{0}</span><span class="s2">'='</span><span class="si">{1}</span><span class="s2">'"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">tblproperties</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"TBLPROPERTIES(</span><span class="si">{tprops}</span><span class="s2">)</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">";"</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="n">hql</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="s2">"LOAD DATA LOCAL INPATH '</span><span class="si">{filepath}</span><span class="s2">' "</span>
+ <span class="k">if</span> <span class="n">overwrite</span><span class="p">:</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"OVERWRITE "</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"INTO TABLE </span><span class="si">{table}</span><span class="s2"> "</span>
+ <span class="k">if</span> <span class="n">partition</span><span class="p">:</span>
+ <span class="n">pvals</span> <span class="o">=</span> <span class="s2">", "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span>
+ <span class="p">[</span><span class="s2">"</span><span class="si">{0}</span><span class="s2">='</span><span class="si">{1}</span><span class="s2">'"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">partition</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"PARTITION (</span><span class="si">{pvals}</span><span class="s2">);"</span>
+
+ <span class="c1"># As a workaround for HIVE-10541, add a newline character</span>
+ <span class="c1"># at the end of hql (AIRFLOW-2412).</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s1">'</span><span class="se">\n</span><span class="s1">'</span>
+
+ <span class="n">hql</span> <span class="o">=</span> <span class="n">hql</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span></div>
+
+ <span class="k">def</span> <span class="nf">kill</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s1">'sp'</span><span class="p">):</span>
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">poll</span><span class="p">()</span> <span class="ow">is</span> <span class="kc">None</span><span class="p">:</span>
+ <span class="nb">print</span><span class="p">(</span><span class="s2">"Killing the Hive job"</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">terminate</span><span class="p">()</span>
+ <span class="n">time</span><span class="o">.</span><span class="n">sleep</span><span class="p">(</span><span class="mi">60</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">kill</span><span class="p">()</span></div>
+
+
+<div class="viewcode-block" id="HiveMetastoreHook"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook">[docs]</a><span class="k">class</span> <span class="nc">HiveMetastoreHook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span>
+ <span class="sd">""" Wrapper to interact with the Hive Metastore"""</span>
+
+ <span class="c1"># java short max val</span>
+ <span class="n">MAX_PART_COUNT</span> <span class="o">=</span> <span class="mi">32767</span>
+
+ <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">metastore_conn_id</span><span class="o">=</span><span class="s1">'metastore_default'</span><span class="p">):</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="n">metastore_conn_id</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_metastore_client</span><span class="p">()</span>
+
+ <span class="k">def</span> <span class="nf">__getstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="c1"># This is for pickling to work despite the thirft hive client not</span>
+ <span class="c1"># being pickable</span>
+ <span class="n">d</span> <span class="o">=</span> <span class="nb">dict</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">)</span>
+ <span class="k">del</span> <span class="n">d</span><span class="p">[</span><span class="s1">'metastore'</span><span class="p">]</span>
+ <span class="k">return</span> <span class="n">d</span>
+
+ <span class="k">def</span> <span class="nf">__setstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">d</span><span class="p">):</span>
+ <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="vm">__dict__</span><span class="p">[</span><span class="s1">'metastore'</span><span class="p">]</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_metastore_client</span><span class="p">()</span>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_metastore_client"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_metastore_client">[docs]</a> <span class="k">def</span> <span class="nf">get_metastore_client</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Returns a Hive thrift client.</span>
+<span class="sd"> """</span>
+ <span class="kn">from</span> <span class="nn">thrift.transport</span> <span class="k">import</span> <span class="n">TSocket</span><span class="p">,</span> <span class="n">TTransport</span>
+ <span class="kn">from</span> <span class="nn">thrift.protocol</span> <span class="k">import</span> <span class="n">TBinaryProtocol</span>
+ <span class="n">ms</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn</span>
+ <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'NOSASL'</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span>
+ <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'GSSAPI'</span><span class="p">)</span>
+ <span class="n">kerberos_service_name</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'kerberos_service_name'</span><span class="p">,</span> <span class="s1">'hive'</span><span class="p">)</span>
+
+ <span class="n">socket</span> <span class="o">=</span> <span class="n">TSocket</span><span class="o">.</span><span class="n">TSocket</span><span class="p">(</span><span class="n">ms</span><span class="o">.</span><span class="n">host</span><span class="p">,</span> <span class="n">ms</span><span class="o">.</span><span class="n">port</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">conf</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span> \
+ <span class="ow">and</span> <span class="n">auth_mechanism</span> <span class="o">==</span> <span class="s1">'GSSAPI'</span><span class="p">:</span>
+ <span class="k">try</span><span class="p">:</span>
+ <span class="kn">import</span> <span class="nn">saslwrapper</span> <span class="k">as</span> <span class="nn">sasl</span>
+ <span class="k">except</span> <span class="ne">ImportError</span><span class="p">:</span>
+ <span class="kn">import</span> <span class="nn">sasl</span>
+
+ <span class="k">def</span> <span class="nf">sasl_factory</span><span class="p">():</span>
+ <span class="n">sasl_client</span> <span class="o">=</span> <span class="n">sasl</span><span class="o">.</span><span class="n">Client</span><span class="p">()</span>
+ <span class="n">sasl_client</span><span class="o">.</span><span class="n">setAttr</span><span class="p">(</span><span class="s2">"host"</span><span class="p">,</span> <span class="n">ms</span><span class="o">.</span><span class="n">host</span><span class="p">)</span>
+ <span class="n">sasl_client</span><span class="o">.</span><span class="n">setAttr</span><span class="p">(</span><span class="s2">"service"</span><span class="p">,</span> <span class="n">kerberos_service_name</span><span class="p">)</span>
+ <span class="n">sasl_client</span><span class="o">.</span><span class="n">init</span><span class="p">()</span>
+ <span class="k">return</span> <span class="n">sasl_client</span>
+
+ <span class="kn">from</span> <span class="nn">thrift_sasl</span> <span class="k">import</span> <span class="n">TSaslClientTransport</span>
+ <span class="n">transport</span> <span class="o">=</span> <span class="n">TSaslClientTransport</span><span class="p">(</span><span class="n">sasl_factory</span><span class="p">,</span> <span class="s2">"GSSAPI"</span><span class="p">,</span> <span class="n">socket</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="n">transport</span> <span class="o">=</span> <span class="n">TTransport</span><span class="o">.</span><span class="n">TBufferedTransport</span><span class="p">(</span><span class="n">socket</span><span class="p">)</span>
+
+ <span class="n">protocol</span> <span class="o">=</span> <span class="n">TBinaryProtocol</span><span class="o">.</span><span class="n">TBinaryProtocol</span><span class="p">(</span><span class="n">transport</span><span class="p">)</span>
+
+ <span class="k">return</span> <span class="n">hmsclient</span><span class="o">.</span><span class="n">HMSClient</span><span class="p">(</span><span class="n">iprot</span><span class="o">=</span><span class="n">protocol</span><span class="p">)</span></div>
+
+ <span class="k">def</span> <span class="nf">get_conn</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span>
+
+<div class="viewcode-block" id="HiveMetastoreHook.check_for_partition"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook.check_for_partition">[docs]</a> <span class="k">def</span> <span class="nf">check_for_partition</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Checks whether a partition exists</span>
+
+<span class="sd"> :param schema: Name of hive schema (database) @table belongs to</span>
+<span class="sd"> :type schema: string</span>
+<span class="sd"> :param table: Name of hive table @partition belongs to</span>
+<span class="sd"> :type table: string</span>
+<span class="sd"> :param partition: Expression that matches the partitions to check for</span>
+<span class="sd"> (eg `a = 'b' AND c = 'd'`)</span>
+<span class="sd"> :type partition: string</span>
+<span class="sd"> :rtype: boolean</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = 'static_babynames_partitioned'</span>
+<span class="sd"> >>> hh.check_for_partition('airflow', t, "ds='2015-01-01'")</span>
+<span class="sd"> True</span>
+<span class="sd"> """</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span>
+ <span class="n">partitions</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_partitions_by_filter</span><span class="p">(</span>
+ <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
+
+ <span class="k">if</span> <span class="n">partitions</span><span class="p">:</span>
+ <span class="k">return</span> <span class="kc">True</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="k">return</span> <span class="kc">False</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.check_for_named_partition"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook.check_for_named_partition">[docs]</a> <span class="k">def</span> <span class="nf">check_for_named_partition</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition_name</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Checks whether a partition with a given name exists</span>
+
+<span class="sd"> :param schema: Name of hive schema (database) @table belongs to</span>
+<span class="sd"> :type schema: string</span>
+<span class="sd"> :param table: Name of hive table @partition belongs to</span>
+<span class="sd"> :type table: string</span>
+<span class="sd"> :param partition: Name of the partitions to check for (eg `a=b/c=d`)</span>
+<span class="sd"> :type partition: string</span>
+<span class="sd"> :rtype: boolean</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = 'static_babynames_partitioned'</span>
+<span class="sd"> >>> hh.check_for_named_partition('airflow', t, "ds=2015-01-01")</span>
+<span class="sd"> True</span>
+<span class="sd"> >>> hh.check_for_named_partition('airflow', t, "ds=xxx")</span>
+<span class="sd"> False</span>
+<span class="sd"> """</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span>
+ <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">check_for_named_partition</span><span class="p">(</span><span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition_name</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_table"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_table">[docs]</a> <span class="k">def</span> <span class="nf">get_table</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="n">db</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span>
+ <span class="sd">"""Get a metastore table object</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = hh.get_table(db='airflow', table_name='static_babynames')</span>
+<span class="sd"> >>> t.tableName</span>
+<span class="sd"> 'static_babynames'</span>
+<span class="sd"> >>> [col.name for col in t.sd.cols]</span>
+<span class="sd"> ['state', 'year', 'name', 'gender', 'num']</span>
+<span class="sd"> """</span>
+ <span class="k">if</span> <span class="n">db</span> <span class="o">==</span> <span class="s1">'default'</span> <span class="ow">and</span> <span class="s1">'.'</span> <span class="ow">in</span> <span class="n">table_name</span><span class="p">:</span>
+ <span class="n">db</span><span class="p">,</span> <span class="n">table_name</span> <span class="o">=</span> <span class="n">table_name</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">)[:</span><span class="mi">2</span><span class="p">]</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span>
+ <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">dbname</span><span class="o">=</span><span class="n">db</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_tables"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_tables">[docs]</a> <span class="k">def</span> <span class="nf">get_tables</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">db</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="s1">'*'</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Get a metastore table object</span>
+<span class="sd"> """</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span>
+ <span class="n">tables</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_tables</span><span class="p">(</span><span class="n">db_name</span><span class="o">=</span><span class="n">db</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="n">pattern</span><span class="p">)</span>
+ <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">get_table_objects_by_name</span><span class="p">(</span><span class="n">db</span><span class="p">,</span> <span class="n">tables</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_databases"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_databases">[docs]</a> <span class="k">def</span> <span class="nf">get_databases</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="s1">'*'</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Get a metastore table object</span>
+<span class="sd"> """</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span>
+ <span class="k">return</span> <span class="n">client</span><span class="o">.</span><span class="n">get_databases</span><span class="p">(</span><span class="n">pattern</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_partitions"><a class="viewcode-back" href="../../../code.html#airflow.hooks.hive_hooks.HiveMetastoreHook.get_partitions">[docs]</a> <span class="k">def</span> <span class="nf">get_partitions</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="nb">filter</span><span class="o">=</span><span class="kc">None</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Returns a list of all partitions in a table. Works only</span>
+<span class="sd"> for tables with less than 32767 (java short max val).</span>
+<span class="sd"> For subpartitioned table, the number might easily exceed this.</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = 'static_babynames_partitioned'</span>
+<span class="sd"> >>> parts = hh.get_partitions(schema='airflow', table_name=t)</span>
+<span class="sd"> >>> len(parts)</span>
+<span class="sd"> 1</span>
+<span class="sd"> >>> parts</span>
+<span class="sd"> [{'ds': '2015-01-01'}]</span>
+<span class="sd"> """</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="k">as</span> <span class="n">client</span><span class="p">:</span>
+ <span class="n">table</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">dbname</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">)</span>
+ <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"The table isn't partitioned"</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="k">if</span> <span class="nb">filter</span><span class="p">:</span>
+ <span class="n">parts</span> <span class="o">=</span> <span class="n">client</span><span class="o">.</span><span class="n">get_partitions_by_filter</span><span class="p">(</span>
+ <span class="n">db_name</spa
<TRUNCATED>