You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by ma...@apache.org on 2016/06/05 05:24:15 UTC
[25/34] incubator-airflow-site git commit: Initial commit
http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/9e19165c/_modules/hive_hooks.html
----------------------------------------------------------------------
diff --git a/_modules/hive_hooks.html b/_modules/hive_hooks.html
new file mode 100644
index 0000000..15c45e2
--- /dev/null
+++ b/_modules/hive_hooks.html
@@ -0,0 +1,743 @@
+
+
+<!DOCTYPE html>
+<!--[if IE 8]><html class="no-js lt-ie9" lang="en" > <![endif]-->
+<!--[if gt IE 8]><!--> <html class="no-js" lang="en" > <!--<![endif]-->
+<head>
+ <meta charset="utf-8">
+
+ <meta name="viewport" content="width=device-width, initial-scale=1.0">
+
+ <title>hive_hooks — Airflow Documentation</title>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <link rel="stylesheet" href="../_static/css/theme.css" type="text/css" />
+
+
+
+
+
+ <link rel="top" title="Airflow Documentation" href="../index.html"/>
+ <link rel="up" title="Module code" href="index.html"/>
+
+
+ <script src="../_static/js/modernizr.min.js"></script>
+
+</head>
+
+<body class="wy-body-for-nav" role="document">
+
+ <div class="wy-grid-for-nav">
+
+
+ <nav data-toggle="wy-nav-shift" class="wy-nav-side">
+ <div class="wy-side-scroll">
+ <div class="wy-side-nav-search">
+
+
+
+ <a href="../index.html" class="icon icon-home"> Airflow
+
+
+
+ </a>
+
+
+
+
+
+
+
+<div role="search">
+ <form id="rtd-search-form" class="wy-form" action="../search.html" method="get">
+ <input type="text" name="q" placeholder="Search docs" />
+ <input type="hidden" name="check_keywords" value="yes" />
+ <input type="hidden" name="area" value="default" />
+ </form>
+</div>
+
+
+ </div>
+
+ <div class="wy-menu wy-menu-vertical" data-spy="affix" role="navigation" aria-label="main navigation">
+
+
+
+ <ul>
+<li class="toctree-l1"><a class="reference internal" href="../project.html">Project</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../license.html">License</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../start.html">Quick Start</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../installation.html">Installation</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../tutorial.html">Tutorial</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../configuration.html">Configuration</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../ui.html">UI / Screenshots</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../concepts.html">Concepts</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../profiling.html">Data Profiling</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../cli.html">Command Line Interface</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../scheduler.html">Scheduling & Triggers</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../plugins.html">Plugins</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../security.html">Security</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../faq.html">FAQ</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../code.html">API Reference</a></li>
+</ul>
+
+
+
+ </div>
+ </div>
+ </nav>
+
+ <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
+
+
+ <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+ <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+ <a href="../index.html">Airflow</a>
+ </nav>
+
+
+
+ <div class="wy-nav-content">
+ <div class="rst-content">
+
+
+
+
+
+
+<div role="navigation" aria-label="breadcrumbs navigation">
+ <ul class="wy-breadcrumbs">
+ <li><a href="../index.html">Docs</a> »</li>
+
+ <li><a href="index.html">Module code</a> »</li>
+
+ <li>hive_hooks</li>
+ <li class="wy-breadcrumbs-aside">
+
+
+
+ </li>
+ </ul>
+ <hr/>
+</div>
+ <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
+ <div itemprop="articleBody">
+
+ <h1>Source code for hive_hooks</h1><div class="highlight"><pre>
+<span></span><span class="c1"># -*- coding: utf-8 -*-</span>
+<span class="c1">#</span>
+<span class="c1"># Licensed under the Apache License, Version 2.0 (the "License");</span>
+<span class="c1"># you may not use this file except in compliance with the License.</span>
+<span class="c1"># You may obtain a copy of the License at</span>
+<span class="c1">#</span>
+<span class="c1"># http://www.apache.org/licenses/LICENSE-2.0</span>
+<span class="c1">#</span>
+<span class="c1"># Unless required by applicable law or agreed to in writing, software</span>
+<span class="c1"># distributed under the License is distributed on an "AS IS" BASIS,</span>
+<span class="c1"># WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.</span>
+<span class="c1"># See the License for the specific language governing permissions and</span>
+<span class="c1"># limitations under the License.</span>
+<span class="c1">#</span>
+
+<span class="kn">from</span> <span class="nn">__future__</span> <span class="kn">import</span> <span class="n">print_function</span>
+<span class="kn">from</span> <span class="nn">builtins</span> <span class="kn">import</span> <span class="nb">zip</span>
+<span class="kn">from</span> <span class="nn">past.builtins</span> <span class="kn">import</span> <span class="nb">basestring</span>
+<span class="kn">import</span> <span class="nn">unicodecsv</span> <span class="kn">as</span> <span class="nn">csv</span>
+<span class="kn">import</span> <span class="nn">logging</span>
+<span class="kn">import</span> <span class="nn">re</span>
+<span class="kn">import</span> <span class="nn">subprocess</span>
+<span class="kn">from</span> <span class="nn">tempfile</span> <span class="kn">import</span> <span class="n">NamedTemporaryFile</span>
+
+<span class="kn">from</span> <span class="nn">airflow.exceptions</span> <span class="kn">import</span> <span class="n">AirflowException</span>
+<span class="kn">from</span> <span class="nn">airflow.hooks.base_hook</span> <span class="kn">import</span> <span class="n">BaseHook</span>
+<span class="kn">from</span> <span class="nn">airflow.utils.file</span> <span class="kn">import</span> <span class="n">TemporaryDirectory</span>
+<span class="kn">from</span> <span class="nn">airflow</span> <span class="kn">import</span> <span class="n">configuration</span>
+<span class="kn">import</span> <span class="nn">airflow.security.utils</span> <span class="kn">as</span> <span class="nn">utils</span>
+
+
+<div class="viewcode-block" id="HiveCliHook"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveCliHook">[docs]</a><span class="k">class</span> <span class="nc">HiveCliHook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span>
+
+ <span class="sd">"""Simple wrapper around the hive CLI.</span>
+
+<span class="sd"> It also supports the ``beeline``</span>
+<span class="sd"> a lighter CLI that runs JDBC and is replacing the heavier</span>
+<span class="sd"> traditional CLI. To enable ``beeline``, set the use_beeline param in the</span>
+<span class="sd"> extra field of your connection as in ``{ "use_beeline": true }``</span>
+
+<span class="sd"> Note that you can also set default hive CLI parameters using the</span>
+<span class="sd"> ``hive_cli_params`` to be used in your connection as in</span>
+<span class="sd"> ``{"hive_cli_params": "-hiveconf mapred.job.tracker=some.jobtracker:444"}``</span>
+
+<span class="sd"> The extra connection parameter ``auth`` gets passed as in the ``jdbc``</span>
+<span class="sd"> connection string as is.</span>
+<span class="sd"> """</span>
+
+ <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span>
+ <span class="n">hive_cli_conn_id</span><span class="o">=</span><span class="s2">"hive_cli_default"</span><span class="p">,</span>
+ <span class="n">run_as</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+ <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="n">hive_cli_conn_id</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_params</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'hive_cli_params'</span><span class="p">,</span> <span class="s1">''</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">use_beeline</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'use_beeline'</span><span class="p">,</span> <span class="bp">False</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">auth</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'auth'</span><span class="p">,</span> <span class="s1">'noSasl'</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">conn</span> <span class="o">=</span> <span class="n">conn</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_as</span> <span class="o">=</span> <span class="n">run_as</span>
+
+<div class="viewcode-block" id="HiveCliHook.run_cli"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveCliHook.run_cli">[docs]</a> <span class="k">def</span> <span class="nf">run_cli</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="bp">True</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Run an hql statement using the hive cli</span>
+
+<span class="sd"> >>> hh = HiveCliHook()</span>
+<span class="sd"> >>> result = hh.run_cli("USE airflow;")</span>
+<span class="sd"> >>> ("OK" in result)</span>
+<span class="sd"> True</span>
+<span class="sd"> """</span>
+ <span class="n">conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">conn</span>
+ <span class="n">schema</span> <span class="o">=</span> <span class="n">schema</span> <span class="ow">or</span> <span class="n">conn</span><span class="o">.</span><span class="n">schema</span>
+ <span class="k">if</span> <span class="n">schema</span><span class="p">:</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="s2">"USE {schema};</span><span class="se">\n</span><span class="s2">{hql}"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+
+ <span class="k">with</span> <span class="n">TemporaryDirectory</span><span class="p">(</span><span class="n">prefix</span><span class="o">=</span><span class="s1">'airflow_hiveop_'</span><span class="p">)</span> <span class="k">as</span> <span class="n">tmp_dir</span><span class="p">:</span>
+ <span class="k">with</span> <span class="n">NamedTemporaryFile</span><span class="p">(</span><span class="nb">dir</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span>
+ <span class="n">f</span><span class="o">.</span><span class="n">write</span><span class="p">(</span><span class="n">hql</span><span class="o">.</span><span class="n">encode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">))</span>
+ <span class="n">f</span><span class="o">.</span><span class="n">flush</span><span class="p">()</span>
+ <span class="n">fname</span> <span class="o">=</span> <span class="n">f</span><span class="o">.</span><span class="n">name</span>
+ <span class="n">hive_bin</span> <span class="o">=</span> <span class="s1">'hive'</span>
+ <span class="n">cmd_extra</span> <span class="o">=</span> <span class="p">[]</span>
+
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">use_beeline</span><span class="p">:</span>
+ <span class="n">hive_bin</span> <span class="o">=</span> <span class="s1">'beeline'</span>
+ <span class="n">jdbc_url</span> <span class="o">=</span> <span class="s2">"jdbc:hive2://{conn.host}:{conn.port}/{conn.schema}"</span>
+ <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span>
+ <span class="n">template</span> <span class="o">=</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span>
+ <span class="s1">'principal'</span><span class="p">,</span> <span class="s2">"hive/_HOST@EXAMPLE.COM"</span><span class="p">)</span>
+ <span class="k">if</span> <span class="s2">"_HOST"</span> <span class="ow">in</span> <span class="n">template</span><span class="p">:</span>
+ <span class="n">template</span> <span class="o">=</span> <span class="n">utils</span><span class="o">.</span><span class="n">replace_hostname_pattern</span><span class="p">(</span>
+ <span class="n">utils</span><span class="o">.</span><span class="n">get_components</span><span class="p">(</span><span class="n">template</span><span class="p">))</span>
+
+ <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">""</span> <span class="c1"># noqa</span>
+ <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'proxy_user'</span><span class="p">)</span> <span class="o">==</span> <span class="s2">"login"</span> <span class="ow">and</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">:</span>
+ <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">"hive.server2.proxy.user={0}"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">)</span>
+ <span class="k">elif</span> <span class="n">conn</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'proxy_user'</span><span class="p">)</span> <span class="o">==</span> <span class="s2">"owner"</span> <span class="ow">and</span> <span class="bp">self</span><span class="o">.</span><span class="n">run_as</span><span class="p">:</span>
+ <span class="n">proxy_user</span> <span class="o">=</span> <span class="s2">"hive.server2.proxy.user={0}"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">run_as</span><span class="p">)</span>
+
+ <span class="n">jdbc_url</span> <span class="o">+=</span> <span class="s2">";principal={template};{proxy_user}"</span>
+ <span class="k">elif</span> <span class="bp">self</span><span class="o">.</span><span class="n">auth</span><span class="p">:</span>
+ <span class="n">jdbc_url</span> <span class="o">+=</span> <span class="s2">";auth="</span> <span class="o">+</span> <span class="bp">self</span><span class="o">.</span><span class="n">auth</span>
+
+ <span class="n">jdbc_url</span> <span class="o">=</span> <span class="n">jdbc_url</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+
+ <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-u'</span><span class="p">,</span> <span class="n">jdbc_url</span><span class="p">]</span>
+ <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">:</span>
+ <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-n'</span><span class="p">,</span> <span class="n">conn</span><span class="o">.</span><span class="n">login</span><span class="p">]</span>
+ <span class="k">if</span> <span class="n">conn</span><span class="o">.</span><span class="n">password</span><span class="p">:</span>
+ <span class="n">cmd_extra</span> <span class="o">+=</span> <span class="p">[</span><span class="s1">'-p'</span><span class="p">,</span> <span class="n">conn</span><span class="o">.</span><span class="n">password</span><span class="p">]</span>
+
+ <span class="n">hive_cmd</span> <span class="o">=</span> <span class="p">[</span><span class="n">hive_bin</span><span class="p">,</span> <span class="s1">'-f'</span><span class="p">,</span> <span class="n">fname</span><span class="p">]</span> <span class="o">+</span> <span class="n">cmd_extra</span>
+
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_params</span><span class="p">:</span>
+ <span class="n">hive_params_list</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">hive_cli_params</span><span class="o">.</span><span class="n">split</span><span class="p">()</span>
+ <span class="n">hive_cmd</span><span class="o">.</span><span class="n">extend</span><span class="p">(</span><span class="n">hive_params_list</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">verbose</span><span class="p">:</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">" "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">hive_cmd</span><span class="p">))</span>
+ <span class="n">sp</span> <span class="o">=</span> <span class="n">subprocess</span><span class="o">.</span><span class="n">Popen</span><span class="p">(</span>
+ <span class="n">hive_cmd</span><span class="p">,</span>
+ <span class="n">stdout</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">PIPE</span><span class="p">,</span>
+ <span class="n">stderr</span><span class="o">=</span><span class="n">subprocess</span><span class="o">.</span><span class="n">STDOUT</span><span class="p">,</span>
+ <span class="n">cwd</span><span class="o">=</span><span class="n">tmp_dir</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">sp</span> <span class="o">=</span> <span class="n">sp</span>
+ <span class="n">stdout</span> <span class="o">=</span> <span class="s1">''</span>
+ <span class="k">while</span> <span class="bp">True</span><span class="p">:</span>
+ <span class="n">line</span> <span class="o">=</span> <span class="n">sp</span><span class="o">.</span><span class="n">stdout</span><span class="o">.</span><span class="n">readline</span><span class="p">()</span>
+ <span class="k">if</span> <span class="ow">not</span> <span class="n">line</span><span class="p">:</span>
+ <span class="k">break</span>
+ <span class="n">stdout</span> <span class="o">+=</span> <span class="n">line</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">verbose</span><span class="p">:</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">line</span><span class="o">.</span><span class="n">decode</span><span class="p">(</span><span class="s1">'UTF-8'</span><span class="p">)</span><span class="o">.</span><span class="n">strip</span><span class="p">())</span>
+ <span class="n">sp</span><span class="o">.</span><span class="n">wait</span><span class="p">()</span>
+
+ <span class="k">if</span> <span class="n">sp</span><span class="o">.</span><span class="n">returncode</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="n">stdout</span><span class="p">)</span>
+
+ <span class="k">return</span> <span class="n">stdout</span></div>
+
+<div class="viewcode-block" id="HiveCliHook.test_hql"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveCliHook.test_hql">[docs]</a> <span class="k">def</span> <span class="nf">test_hql</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Test an hql statement using the hive cli and EXPLAIN</span>
+
+<span class="sd"> """</span>
+ <span class="n">create</span><span class="p">,</span> <span class="n">insert</span><span class="p">,</span> <span class="n">other</span> <span class="o">=</span> <span class="p">[],</span> <span class="p">[],</span> <span class="p">[]</span>
+ <span class="k">for</span> <span class="n">query</span> <span class="ow">in</span> <span class="n">hql</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">';'</span><span class="p">):</span> <span class="c1"># naive</span>
+ <span class="n">query_original</span> <span class="o">=</span> <span class="n">query</span>
+ <span class="n">query</span> <span class="o">=</span> <span class="n">query</span><span class="o">.</span><span class="n">lower</span><span class="p">()</span><span class="o">.</span><span class="n">strip</span><span class="p">()</span>
+
+ <span class="k">if</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'create table'</span><span class="p">):</span>
+ <span class="n">create</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
+ <span class="k">elif</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">((</span><span class="s1">'set '</span><span class="p">,</span>
+ <span class="s1">'add jar '</span><span class="p">,</span>
+ <span class="s1">'create temporary function'</span><span class="p">)):</span>
+ <span class="n">other</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
+ <span class="k">elif</span> <span class="n">query</span><span class="o">.</span><span class="n">startswith</span><span class="p">(</span><span class="s1">'insert'</span><span class="p">):</span>
+ <span class="n">insert</span><span class="o">.</span><span class="n">append</span><span class="p">(</span><span class="n">query_original</span><span class="p">)</span>
+ <span class="n">other</span> <span class="o">=</span> <span class="s1">';'</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">other</span><span class="p">)</span>
+ <span class="k">for</span> <span class="n">query_set</span> <span class="ow">in</span> <span class="p">[</span><span class="n">create</span><span class="p">,</span> <span class="n">insert</span><span class="p">]:</span>
+ <span class="k">for</span> <span class="n">query</span> <span class="ow">in</span> <span class="n">query_set</span><span class="p">:</span>
+
+ <span class="n">query_preview</span> <span class="o">=</span> <span class="s1">' '</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">())[:</span><span class="mi">50</span><span class="p">]</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Testing HQL [{0} (...)]"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">query_preview</span><span class="p">))</span>
+ <span class="k">if</span> <span class="n">query_set</span> <span class="o">==</span> <span class="n">insert</span><span class="p">:</span>
+ <span class="n">query</span> <span class="o">=</span> <span class="n">other</span> <span class="o">+</span> <span class="s1">'; explain '</span> <span class="o">+</span> <span class="n">query</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="n">query</span> <span class="o">=</span> <span class="s1">'explain '</span> <span class="o">+</span> <span class="n">query</span>
+ <span class="k">try</span><span class="p">:</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">query</span><span class="p">,</span> <span class="n">verbose</span><span class="o">=</span><span class="bp">False</span><span class="p">)</span>
+ <span class="k">except</span> <span class="n">AirflowException</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
+ <span class="n">message</span> <span class="o">=</span> <span class="n">e</span><span class="o">.</span><span class="n">args</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)[</span><span class="o">-</span><span class="mi">2</span><span class="p">]</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">message</span><span class="p">)</span>
+ <span class="n">error_loc</span> <span class="o">=</span> <span class="n">re</span><span class="o">.</span><span class="n">search</span><span class="p">(</span><span class="s1">'(\d+):(\d+)'</span><span class="p">,</span> <span class="n">message</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">error_loc</span> <span class="ow">and</span> <span class="n">error_loc</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">)</span><span class="o">.</span><span class="n">isdigit</span><span class="p">():</span>
+ <span class="n">l</span> <span class="o">=</span> <span class="nb">int</span><span class="p">(</span><span class="n">error_loc</span><span class="o">.</span><span class="n">group</span><span class="p">(</span><span class="mi">1</span><span class="p">))</span>
+ <span class="n">begin</span> <span class="o">=</span> <span class="nb">max</span><span class="p">(</span><span class="n">l</span><span class="o">-</span><span class="mi">2</span><span class="p">,</span> <span class="mi">0</span><span class="p">)</span>
+ <span class="n">end</span> <span class="o">=</span> <span class="nb">min</span><span class="p">(</span><span class="n">l</span><span class="o">+</span><span class="mi">3</span><span class="p">,</span> <span class="nb">len</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)))</span>
+ <span class="n">context</span> <span class="o">=</span> <span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="o">.</span><span class="n">join</span><span class="p">(</span><span class="n">query</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'</span><span class="se">\n</span><span class="s1">'</span><span class="p">)[</span><span class="n">begin</span><span class="p">:</span><span class="n">end</span><span class="p">])</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Context :</span><span class="se">\n</span><span class="s2"> {0}"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">context</span><span class="p">))</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"SUCCESS"</span><span class="p">)</span></div>
+
+<div class="viewcode-block" id="HiveCliHook.load_file"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveCliHook.load_file">[docs]</a> <span class="k">def</span> <span class="nf">load_file</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span>
+ <span class="n">filepath</span><span class="p">,</span>
+ <span class="n">table</span><span class="p">,</span>
+ <span class="n">delimiter</span><span class="o">=</span><span class="s2">","</span><span class="p">,</span>
+ <span class="n">field_dict</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span>
+ <span class="n">create</span><span class="o">=</span><span class="bp">True</span><span class="p">,</span>
+ <span class="n">overwrite</span><span class="o">=</span><span class="bp">True</span><span class="p">,</span>
+ <span class="n">partition</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span>
+ <span class="n">recreate</span><span class="o">=</span><span class="bp">False</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Loads a local file into Hive</span>
+
+<span class="sd"> Note that the table generated in Hive uses ``STORED AS textfile``</span>
+<span class="sd"> which isn't the most efficient serialization format. If a</span>
+<span class="sd"> large amount of data is loaded and/or if the tables gets</span>
+<span class="sd"> queried considerably, you may want to use this operator only to</span>
+<span class="sd"> stage the data into a temporary table before loading it into its</span>
+<span class="sd"> final destination using a ``HiveOperator``.</span>
+
+<span class="sd"> :param table: target Hive table, use dot notation to target a</span>
+<span class="sd"> specific database</span>
+<span class="sd"> :type table: str</span>
+<span class="sd"> :param create: whether to create the table if it doesn't exist</span>
+<span class="sd"> :type create: bool</span>
+<span class="sd"> :param recreate: whether to drop and recreate the table at every</span>
+<span class="sd"> execution</span>
+<span class="sd"> :type recreate: bool</span>
+<span class="sd"> :param partition: target partition as a dict of partition columns</span>
+<span class="sd"> and values</span>
+<span class="sd"> :type partition: dict</span>
+<span class="sd"> :param delimiter: field delimiter in the file</span>
+<span class="sd"> :type delimiter: str</span>
+<span class="sd"> """</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="s1">''</span>
+ <span class="k">if</span> <span class="n">recreate</span><span class="p">:</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"DROP TABLE IF EXISTS {table};</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="k">if</span> <span class="n">create</span> <span class="ow">or</span> <span class="n">recreate</span><span class="p">:</span>
+ <span class="n">fields</span> <span class="o">=</span> <span class="s2">",</span><span class="se">\n</span><span class="s2"> "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span>
+ <span class="p">[</span><span class="n">k</span> <span class="o">+</span> <span class="s1">' '</span> <span class="o">+</span> <span class="n">v</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">field_dict</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"CREATE TABLE IF NOT EXISTS {table} (</span><span class="se">\n</span><span class="s2">{fields})</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="k">if</span> <span class="n">partition</span><span class="p">:</span>
+ <span class="n">pfields</span> <span class="o">=</span> <span class="s2">",</span><span class="se">\n</span><span class="s2"> "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span>
+ <span class="p">[</span><span class="n">p</span> <span class="o">+</span> <span class="s2">" STRING"</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">partition</span><span class="p">])</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"PARTITIONED BY ({pfields})</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"ROW FORMAT DELIMITED</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"FIELDS TERMINATED BY '{delimiter}'</span><span class="se">\n</span><span class="s2">"</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"STORED AS textfile;"</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="n">hql</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="s2">"LOAD DATA LOCAL INPATH '{filepath}' "</span>
+ <span class="k">if</span> <span class="n">overwrite</span><span class="p">:</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"OVERWRITE "</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"INTO TABLE {table} "</span>
+ <span class="k">if</span> <span class="n">partition</span><span class="p">:</span>
+ <span class="n">pvals</span> <span class="o">=</span> <span class="s2">", "</span><span class="o">.</span><span class="n">join</span><span class="p">(</span>
+ <span class="p">[</span><span class="s2">"{0}='{1}'"</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">k</span><span class="p">,</span> <span class="n">v</span><span class="p">)</span> <span class="k">for</span> <span class="n">k</span><span class="p">,</span> <span class="n">v</span> <span class="ow">in</span> <span class="n">partition</span><span class="o">.</span><span class="n">items</span><span class="p">()])</span>
+ <span class="n">hql</span> <span class="o">+=</span> <span class="s2">"PARTITION ({pvals});"</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="n">hql</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="o">**</span><span class="nb">locals</span><span class="p">())</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">run_cli</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span></div>
+
+ <span class="k">def</span> <span class="nf">kill</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="k">if</span> <span class="nb">hasattr</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="s1">'sp'</span><span class="p">):</span>
+ <span class="k">if</span> <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">poll</span><span class="p">()</span> <span class="ow">is</span> <span class="bp">None</span><span class="p">:</span>
+ <span class="k">print</span><span class="p">(</span><span class="s2">"Killing the Hive job"</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">sp</span><span class="o">.</span><span class="n">kill</span><span class="p">()</span></div>
+
+
+<div class="viewcode-block" id="HiveMetastoreHook"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook">[docs]</a><span class="k">class</span> <span class="nc">HiveMetastoreHook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span>
+
+ <span class="sd">""" Wrapper to interact with the Hive Metastore"""</span>
+
+ <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">metastore_conn_id</span><span class="o">=</span><span class="s1">'metastore_default'</span><span class="p">):</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="n">metastore_conn_id</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_metastore_client</span><span class="p">()</span>
+
+ <span class="k">def</span> <span class="nf">__getstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="c1"># This is for pickling to work despite the thirft hive client not</span>
+ <span class="c1"># being pickable</span>
+ <span class="n">d</span> <span class="o">=</span> <span class="nb">dict</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">__dict__</span><span class="p">)</span>
+ <span class="k">del</span> <span class="n">d</span><span class="p">[</span><span class="s1">'metastore'</span><span class="p">]</span>
+ <span class="k">return</span> <span class="n">d</span>
+
+ <span class="k">def</span> <span class="nf">__setstate__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">d</span><span class="p">):</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">__dict__</span><span class="o">.</span><span class="n">update</span><span class="p">(</span><span class="n">d</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">__dict__</span><span class="p">[</span><span class="s1">'metastore'</span><span class="p">]</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_metastore_client</span><span class="p">()</span>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_metastore_client"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.get_metastore_client">[docs]</a> <span class="k">def</span> <span class="nf">get_metastore_client</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Returns a Hive thrift client.</span>
+<span class="sd"> """</span>
+ <span class="kn">from</span> <span class="nn">thrift.transport</span> <span class="kn">import</span> <span class="n">TSocket</span><span class="p">,</span> <span class="n">TTransport</span>
+ <span class="kn">from</span> <span class="nn">thrift.protocol</span> <span class="kn">import</span> <span class="n">TBinaryProtocol</span>
+ <span class="kn">from</span> <span class="nn">hive_service</span> <span class="kn">import</span> <span class="n">ThriftHive</span>
+ <span class="n">ms</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore_conn</span>
+ <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'NOSASL'</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span>
+ <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'GSSAPI'</span><span class="p">)</span>
+ <span class="n">kerberos_service_name</span> <span class="o">=</span> <span class="n">ms</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'kerberos_service_name'</span><span class="p">,</span> <span class="s1">'hive'</span><span class="p">)</span>
+
+ <span class="n">socket</span> <span class="o">=</span> <span class="n">TSocket</span><span class="o">.</span><span class="n">TSocket</span><span class="p">(</span><span class="n">ms</span><span class="o">.</span><span class="n">host</span><span class="p">,</span> <span class="n">ms</span><span class="o">.</span><span class="n">port</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span> <span class="ow">and</span> <span class="n">auth_mechanism</span> <span class="o">==</span> <span class="s1">'GSSAPI'</span><span class="p">:</span>
+ <span class="k">try</span><span class="p">:</span>
+ <span class="kn">import</span> <span class="nn">saslwrapper</span> <span class="kn">as</span> <span class="nn">sasl</span>
+ <span class="k">except</span> <span class="ne">ImportError</span><span class="p">:</span>
+ <span class="kn">import</span> <span class="nn">sasl</span>
+
+ <span class="k">def</span> <span class="nf">sasl_factory</span><span class="p">():</span>
+ <span class="n">sasl_client</span> <span class="o">=</span> <span class="n">sasl</span><span class="o">.</span><span class="n">Client</span><span class="p">()</span>
+ <span class="n">sasl_client</span><span class="o">.</span><span class="n">setAttr</span><span class="p">(</span><span class="s2">"host"</span><span class="p">,</span> <span class="n">ms</span><span class="o">.</span><span class="n">host</span><span class="p">)</span>
+ <span class="n">sasl_client</span><span class="o">.</span><span class="n">setAttr</span><span class="p">(</span><span class="s2">"service"</span><span class="p">,</span> <span class="n">kerberos_service_name</span><span class="p">)</span>
+ <span class="n">sasl_client</span><span class="o">.</span><span class="n">init</span><span class="p">()</span>
+ <span class="k">return</span> <span class="n">sasl_client</span>
+
+ <span class="kn">from</span> <span class="nn">thrift_sasl</span> <span class="kn">import</span> <span class="n">TSaslClientTransport</span>
+ <span class="n">transport</span> <span class="o">=</span> <span class="n">TSaslClientTransport</span><span class="p">(</span><span class="n">sasl_factory</span><span class="p">,</span> <span class="s2">"GSSAPI"</span><span class="p">,</span> <span class="n">socket</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="n">transport</span> <span class="o">=</span> <span class="n">TTransport</span><span class="o">.</span><span class="n">TBufferedTransport</span><span class="p">(</span><span class="n">socket</span><span class="p">)</span>
+
+ <span class="n">protocol</span> <span class="o">=</span> <span class="n">TBinaryProtocol</span><span class="o">.</span><span class="n">TBinaryProtocol</span><span class="p">(</span><span class="n">transport</span><span class="p">)</span>
+
+ <span class="k">return</span> <span class="n">ThriftHive</span><span class="o">.</span><span class="n">Client</span><span class="p">(</span><span class="n">protocol</span><span class="p">)</span></div>
+
+ <span class="k">def</span> <span class="nf">get_conn</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span>
+
+<div class="viewcode-block" id="HiveMetastoreHook.check_for_partition"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.check_for_partition">[docs]</a> <span class="k">def</span> <span class="nf">check_for_partition</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition</span><span class="p">):</span>
+ <span class="sd">"""Checks whether a partition exists</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = 'static_babynames_partitioned'</span>
+<span class="sd"> >>> hh.check_for_partition('airflow', t, "ds='2015-01-01'")</span>
+<span class="sd"> True</span>
+<span class="sd"> """</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">open</span><span class="p">()</span>
+ <span class="n">partitions</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_partitions_by_filter</span><span class="p">(</span>
+ <span class="n">schema</span><span class="p">,</span> <span class="n">table</span><span class="p">,</span> <span class="n">partition</span><span class="p">,</span> <span class="mi">1</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">close</span><span class="p">()</span>
+ <span class="k">if</span> <span class="n">partitions</span><span class="p">:</span>
+ <span class="k">return</span> <span class="bp">True</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="k">return</span> <span class="bp">False</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_table"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.get_table">[docs]</a> <span class="k">def</span> <span class="nf">get_table</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="n">db</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span>
+ <span class="sd">"""Get a metastore table object</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = hh.get_table(db='airflow', table_name='static_babynames')</span>
+<span class="sd"> >>> t.tableName</span>
+<span class="sd"> 'static_babynames'</span>
+<span class="sd"> >>> [col.name for col in t.sd.cols]</span>
+<span class="sd"> ['state', 'year', 'name', 'gender', 'num']</span>
+<span class="sd"> """</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">open</span><span class="p">()</span>
+ <span class="k">if</span> <span class="n">db</span> <span class="o">==</span> <span class="s1">'default'</span> <span class="ow">and</span> <span class="s1">'.'</span> <span class="ow">in</span> <span class="n">table_name</span><span class="p">:</span>
+ <span class="n">db</span><span class="p">,</span> <span class="n">table_name</span> <span class="o">=</span> <span class="n">table_name</span><span class="o">.</span><span class="n">split</span><span class="p">(</span><span class="s1">'.'</span><span class="p">)[:</span><span class="mi">2</span><span class="p">]</span>
+ <span class="n">table</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">dbname</span><span class="o">=</span><span class="n">db</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">close</span><span class="p">()</span>
+ <span class="k">return</span> <span class="n">table</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_tables"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.get_tables">[docs]</a> <span class="k">def</span> <span class="nf">get_tables</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">db</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="s1">'*'</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Get a metastore table object</span>
+<span class="sd"> """</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">open</span><span class="p">()</span>
+ <span class="n">tables</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_tables</span><span class="p">(</span><span class="n">db_name</span><span class="o">=</span><span class="n">db</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="n">pattern</span><span class="p">)</span>
+ <span class="n">objs</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_table_objects_by_name</span><span class="p">(</span><span class="n">db</span><span class="p">,</span> <span class="n">tables</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">close</span><span class="p">()</span>
+ <span class="k">return</span> <span class="n">objs</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_databases"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.get_databases">[docs]</a> <span class="k">def</span> <span class="nf">get_databases</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">pattern</span><span class="o">=</span><span class="s1">'*'</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Get a metastore table object</span>
+<span class="sd"> """</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">open</span><span class="p">()</span>
+ <span class="n">dbs</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_databases</span><span class="p">(</span><span class="n">pattern</span><span class="p">)</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">close</span><span class="p">()</span>
+ <span class="k">return</span> <span class="n">dbs</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.get_partitions"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.get_partitions">[docs]</a> <span class="k">def</span> <span class="nf">get_partitions</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="nb">filter</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Returns a list of all partitions in a table. Works only</span>
+<span class="sd"> for tables with less than 32767 (java short max val).</span>
+<span class="sd"> For subpartitioned table, the number might easily exceed this.</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = 'static_babynames_partitioned'</span>
+<span class="sd"> >>> parts = hh.get_partitions(schema='airflow', table_name=t)</span>
+<span class="sd"> >>> len(parts)</span>
+<span class="sd"> 1</span>
+<span class="sd"> >>> parts</span>
+<span class="sd"> [{'ds': '2015-01-01'}]</span>
+<span class="sd"> """</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">open</span><span class="p">()</span>
+ <span class="n">table</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">dbname</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">)</span>
+ <span class="k">if</span> <span class="nb">len</span><span class="p">(</span><span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">)</span> <span class="o">==</span> <span class="mi">0</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">"The table isn't partitioned"</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="k">if</span> <span class="nb">filter</span><span class="p">:</span>
+ <span class="n">parts</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_partitions_by_filter</span><span class="p">(</span>
+ <span class="n">db_name</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">,</span>
+ <span class="nb">filter</span><span class="o">=</span><span class="nb">filter</span><span class="p">,</span> <span class="n">max_parts</span><span class="o">=</span><span class="mi">32767</span><span class="p">)</span>
+ <span class="k">else</span><span class="p">:</span>
+ <span class="n">parts</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">get_partitions</span><span class="p">(</span>
+ <span class="n">db_name</span><span class="o">=</span><span class="n">schema</span><span class="p">,</span> <span class="n">tbl_name</span><span class="o">=</span><span class="n">table_name</span><span class="p">,</span> <span class="n">max_parts</span><span class="o">=</span><span class="mi">32767</span><span class="p">)</span>
+
+ <span class="bp">self</span><span class="o">.</span><span class="n">metastore</span><span class="o">.</span><span class="n">_oprot</span><span class="o">.</span><span class="n">trans</span><span class="o">.</span><span class="n">close</span><span class="p">()</span>
+ <span class="n">pnames</span> <span class="o">=</span> <span class="p">[</span><span class="n">p</span><span class="o">.</span><span class="n">name</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">table</span><span class="o">.</span><span class="n">partitionKeys</span><span class="p">]</span>
+ <span class="k">return</span> <span class="p">[</span><span class="nb">dict</span><span class="p">(</span><span class="nb">zip</span><span class="p">(</span><span class="n">pnames</span><span class="p">,</span> <span class="n">p</span><span class="o">.</span><span class="n">values</span><span class="p">))</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">parts</span><span class="p">]</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.max_partition"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.max_partition">[docs]</a> <span class="k">def</span> <span class="nf">max_partition</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">schema</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="n">field</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span> <span class="nb">filter</span><span class="o">=</span><span class="bp">None</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Returns the maximum value for all partitions in a table. Works only</span>
+<span class="sd"> for tables that have a single partition key. For subpartitioned</span>
+<span class="sd"> table, we recommend using signal tables.</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> t = 'static_babynames_partitioned'</span>
+<span class="sd"> >>> hh.max_partition(schema='airflow', table_name=t)</span>
+<span class="sd"> '2015-01-01'</span>
+<span class="sd"> """</span>
+ <span class="n">parts</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_partitions</span><span class="p">(</span><span class="n">schema</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="nb">filter</span><span class="p">)</span>
+ <span class="k">if</span> <span class="ow">not</span> <span class="n">parts</span><span class="p">:</span>
+ <span class="k">return</span> <span class="bp">None</span>
+ <span class="k">elif</span> <span class="nb">len</span><span class="p">(</span><span class="n">parts</span><span class="p">[</span><span class="mi">0</span><span class="p">])</span> <span class="o">==</span> <span class="mi">1</span><span class="p">:</span>
+ <span class="n">field</span> <span class="o">=</span> <span class="nb">list</span><span class="p">(</span><span class="n">parts</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span><span class="o">.</span><span class="n">keys</span><span class="p">())[</span><span class="mi">0</span><span class="p">]</span>
+ <span class="k">elif</span> <span class="ow">not</span> <span class="n">field</span><span class="p">:</span>
+ <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span>
+ <span class="s2">"Please specify the field you want the max "</span>
+ <span class="s2">"value for"</span><span class="p">)</span>
+
+ <span class="k">return</span> <span class="nb">max</span><span class="p">([</span><span class="n">p</span><span class="p">[</span><span class="n">field</span><span class="p">]</span> <span class="k">for</span> <span class="n">p</span> <span class="ow">in</span> <span class="n">parts</span><span class="p">])</span></div>
+
+<div class="viewcode-block" id="HiveMetastoreHook.table_exists"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveMetastoreHook.table_exists">[docs]</a> <span class="k">def</span> <span class="nf">table_exists</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">table_name</span><span class="p">,</span> <span class="n">db</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Check if table exists</span>
+
+<span class="sd"> >>> hh = HiveMetastoreHook()</span>
+<span class="sd"> >>> hh.table_exists(db='airflow', table_name='static_babynames')</span>
+<span class="sd"> True</span>
+<span class="sd"> >>> hh.table_exists(db='airflow', table_name='does_not_exist')</span>
+<span class="sd"> False</span>
+<span class="sd"> """</span>
+ <span class="k">try</span><span class="p">:</span>
+ <span class="n">t</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_table</span><span class="p">(</span><span class="n">table_name</span><span class="p">,</span> <span class="n">db</span><span class="p">)</span>
+ <span class="k">return</span> <span class="bp">True</span>
+ <span class="k">except</span> <span class="ne">Exception</span> <span class="k">as</span> <span class="n">e</span><span class="p">:</span>
+ <span class="k">return</span> <span class="bp">False</span></div></div>
+
+
+<div class="viewcode-block" id="HiveServer2Hook"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveServer2Hook">[docs]</a><span class="k">class</span> <span class="nc">HiveServer2Hook</span><span class="p">(</span><span class="n">BaseHook</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Wrapper around the impyla library</span>
+
+<span class="sd"> Note that the default authMechanism is PLAIN, to override it you</span>
+<span class="sd"> can specify it in the ``extra`` of your connection in the UI as in</span>
+<span class="sd"> """</span>
+ <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hiveserver2_conn_id</span><span class="o">=</span><span class="s1">'hiveserver2_default'</span><span class="p">):</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">hiveserver2_conn_id</span> <span class="o">=</span> <span class="n">hiveserver2_conn_id</span>
+
+ <span class="k">def</span> <span class="nf">get_conn</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
+ <span class="n">db</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_connection</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">hiveserver2_conn_id</span><span class="p">)</span>
+ <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">db</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'PLAIN'</span><span class="p">)</span>
+ <span class="n">kerberos_service_name</span> <span class="o">=</span> <span class="bp">None</span>
+ <span class="k">if</span> <span class="n">configuration</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'core'</span><span class="p">,</span> <span class="s1">'security'</span><span class="p">)</span> <span class="o">==</span> <span class="s1">'kerberos'</span><span class="p">:</span>
+ <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="n">db</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'authMechanism'</span><span class="p">,</span> <span class="s1">'GSSAPI'</span><span class="p">)</span>
+ <span class="n">kerberos_service_name</span> <span class="o">=</span> <span class="n">db</span><span class="o">.</span><span class="n">extra_dejson</span><span class="o">.</span><span class="n">get</span><span class="p">(</span><span class="s1">'kerberos_service_name'</span><span class="p">,</span> <span class="s1">'hive'</span><span class="p">)</span>
+
+ <span class="c1"># impyla uses GSSAPI instead of KERBEROS as an auth_mechanism identifier</span>
+ <span class="k">if</span> <span class="n">auth_mechanism</span> <span class="o">==</span> <span class="s1">'KERBEROS'</span><span class="p">:</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">warning</span><span class="p">(</span><span class="s2">"Detected deprecated 'KERBEROS' for authMechanism for </span><span class="si">%s</span><span class="s2">. Please use 'GSSAPI' instead"</span><span class="p">,</span>
+ <span class="bp">self</span><span class="o">.</span><span class="n">hiveserver2_conn_id</span><span class="p">)</span>
+ <span class="n">auth_mechanism</span> <span class="o">=</span> <span class="s1">'GSSAPI'</span>
+
+ <span class="kn">from</span> <span class="nn">impala.dbapi</span> <span class="kn">import</span> <span class="n">connect</span>
+ <span class="k">return</span> <span class="n">connect</span><span class="p">(</span>
+ <span class="n">host</span><span class="o">=</span><span class="n">db</span><span class="o">.</span><span class="n">host</span><span class="p">,</span>
+ <span class="n">port</span><span class="o">=</span><span class="n">db</span><span class="o">.</span><span class="n">port</span><span class="p">,</span>
+ <span class="n">auth_mechanism</span><span class="o">=</span><span class="n">auth_mechanism</span><span class="p">,</span>
+ <span class="n">kerberos_service_name</span><span class="o">=</span><span class="n">kerberos_service_name</span><span class="p">,</span>
+ <span class="n">user</span><span class="o">=</span><span class="n">db</span><span class="o">.</span><span class="n">login</span><span class="p">,</span>
+ <span class="n">database</span><span class="o">=</span><span class="n">db</span><span class="o">.</span><span class="n">schema</span> <span class="ow">or</span> <span class="s1">'default'</span><span class="p">)</span>
+
+ <span class="k">def</span> <span class="nf">get_results</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">,</span> <span class="n">arraysize</span><span class="o">=</span><span class="mi">1000</span><span class="p">):</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_conn</span><span class="p">()</span> <span class="k">as</span> <span class="n">conn</span><span class="p">:</span>
+ <span class="k">if</span> <span class="nb">isinstance</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="nb">basestring</span><span class="p">):</span>
+ <span class="n">hql</span> <span class="o">=</span> <span class="p">[</span><span class="n">hql</span><span class="p">]</span>
+ <span class="n">results</span> <span class="o">=</span> <span class="p">{</span>
+ <span class="s1">'data'</span><span class="p">:</span> <span class="p">[],</span>
+ <span class="s1">'header'</span><span class="p">:</span> <span class="p">[],</span>
+ <span class="p">}</span>
+ <span class="k">for</span> <span class="n">statement</span> <span class="ow">in</span> <span class="n">hql</span><span class="p">:</span>
+ <span class="k">with</span> <span class="n">conn</span><span class="o">.</span><span class="n">cursor</span><span class="p">()</span> <span class="k">as</span> <span class="n">cur</span><span class="p">:</span>
+ <span class="n">cur</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="n">statement</span><span class="p">)</span>
+ <span class="n">records</span> <span class="o">=</span> <span class="n">cur</span><span class="o">.</span><span class="n">fetchall</span><span class="p">()</span>
+ <span class="k">if</span> <span class="n">records</span><span class="p">:</span>
+ <span class="n">results</span> <span class="o">=</span> <span class="p">{</span>
+ <span class="s1">'data'</span><span class="p">:</span> <span class="n">records</span><span class="p">,</span>
+ <span class="s1">'header'</span><span class="p">:</span> <span class="n">cur</span><span class="o">.</span><span class="n">description</span><span class="p">,</span>
+ <span class="p">}</span>
+ <span class="k">return</span> <span class="n">results</span>
+
+ <span class="k">def</span> <span class="nf">to_csv</span><span class="p">(</span>
+ <span class="bp">self</span><span class="p">,</span>
+ <span class="n">hql</span><span class="p">,</span>
+ <span class="n">csv_filepath</span><span class="p">,</span>
+ <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">,</span>
+ <span class="n">delimiter</span><span class="o">=</span><span class="s1">','</span><span class="p">,</span>
+ <span class="n">lineterminator</span><span class="o">=</span><span class="s1">'</span><span class="se">\r\n</span><span class="s1">'</span><span class="p">,</span>
+ <span class="n">output_header</span><span class="o">=</span><span class="bp">True</span><span class="p">,</span>
+ <span class="n">fetch_size</span><span class="o">=</span><span class="mi">1000</span><span class="p">):</span>
+ <span class="n">schema</span> <span class="o">=</span> <span class="n">schema</span> <span class="ow">or</span> <span class="s1">'default'</span>
+ <span class="k">with</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_conn</span><span class="p">()</span> <span class="k">as</span> <span class="n">conn</span><span class="p">:</span>
+ <span class="k">with</span> <span class="n">conn</span><span class="o">.</span><span class="n">cursor</span><span class="p">()</span> <span class="k">as</span> <span class="n">cur</span><span class="p">:</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Running query: "</span> <span class="o">+</span> <span class="n">hql</span><span class="p">)</span>
+ <span class="n">cur</span><span class="o">.</span><span class="n">execute</span><span class="p">(</span><span class="n">hql</span><span class="p">)</span>
+ <span class="n">schema</span> <span class="o">=</span> <span class="n">cur</span><span class="o">.</span><span class="n">description</span>
+ <span class="k">with</span> <span class="nb">open</span><span class="p">(</span><span class="n">csv_filepath</span><span class="p">,</span> <span class="s1">'wb'</span><span class="p">)</span> <span class="k">as</span> <span class="n">f</span><span class="p">:</span>
+ <span class="n">writer</span> <span class="o">=</span> <span class="n">csv</span><span class="o">.</span><span class="n">writer</span><span class="p">(</span><span class="n">f</span><span class="p">,</span> <span class="n">delimiter</span><span class="o">=</span><span class="n">delimiter</span><span class="p">,</span>
+ <span class="n">lineterminator</span><span class="o">=</span><span class="n">lineterminator</span><span class="p">,</span> <span class="n">encoding</span><span class="o">=</span><span class="s1">'utf-8'</span><span class="p">)</span>
+ <span class="k">if</span> <span class="n">output_header</span><span class="p">:</span>
+ <span class="n">writer</span><span class="o">.</span><span class="n">writerow</span><span class="p">([</span><span class="n">c</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span>
+ <span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">cur</span><span class="o">.</span><span class="n">description</span><span class="p">])</span>
+ <span class="n">i</span> <span class="o">=</span> <span class="mi">0</span>
+ <span class="k">while</span> <span class="bp">True</span><span class="p">:</span>
+ <span class="n">rows</span> <span class="o">=</span> <span class="p">[</span><span class="n">row</span> <span class="k">for</span> <span class="n">row</span> <span class="ow">in</span> <span class="n">cur</span><span class="o">.</span><span class="n">fetchmany</span><span class="p">(</span><span class="n">fetch_size</span><span class="p">)</span> <span class="k">if</span> <span class="n">row</span><span class="p">]</span>
+ <span class="k">if</span> <span class="ow">not</span> <span class="n">rows</span><span class="p">:</span>
+ <span class="k">break</span>
+
+ <span class="n">writer</span><span class="o">.</span><span class="n">writerows</span><span class="p">(</span><span class="n">rows</span><span class="p">)</span>
+ <span class="n">i</span> <span class="o">+=</span> <span class="nb">len</span><span class="p">(</span><span class="n">rows</span><span class="p">)</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Written {0} rows so far."</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">i</span><span class="p">))</span>
+ <span class="n">logging</span><span class="o">.</span><span class="n">info</span><span class="p">(</span><span class="s2">"Done. Loaded a total of {0} rows."</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">i</span><span class="p">))</span>
+
+<div class="viewcode-block" id="HiveServer2Hook.get_records"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveServer2Hook.get_records">[docs]</a> <span class="k">def</span> <span class="nf">get_records</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Get a set of records from a Hive query.</span>
+
+<span class="sd"> >>> hh = HiveServer2Hook()</span>
+<span class="sd"> >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"</span>
+<span class="sd"> >>> len(hh.get_records(sql))</span>
+<span class="sd"> 100</span>
+<span class="sd"> """</span>
+ <span class="k">return</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_results</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="n">schema</span><span class="p">)[</span><span class="s1">'data'</span><span class="p">]</span></div>
+
+<div class="viewcode-block" id="HiveServer2Hook.get_pandas_df"><a class="viewcode-back" href="../code.html#airflow.hooks.HiveServer2Hook.get_pandas_df">[docs]</a> <span class="k">def</span> <span class="nf">get_pandas_df</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span> <span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="s1">'default'</span><span class="p">):</span>
+ <span class="sd">"""</span>
+<span class="sd"> Get a pandas dataframe from a Hive query</span>
+
+<span class="sd"> >>> hh = HiveServer2Hook()</span>
+<span class="sd"> >>> sql = "SELECT * FROM airflow.static_babynames LIMIT 100"</span>
+<span class="sd"> >>> df = hh.get_pandas_df(sql)</span>
+<span class="sd"> >>> len(df.index)</span>
+<span class="sd"> 100</span>
+<span class="sd"> """</span>
+ <span class="kn">import</span> <span class="nn">pandas</span> <span class="kn">as</span> <span class="nn">pd</span>
+ <span class="n">res</span> <span class="o">=</span> <span class="bp">self</span><span class="o">.</span><span class="n">get_results</span><span class="p">(</span><span class="n">hql</span><span class="p">,</span> <span class="n">schema</span><span class="o">=</span><span class="n">schema</span><span class="p">)</span>
+ <span class="n">df</span> <span class="o">=</span> <span class="n">pd</span><span class="o">.</span><span class="n">DataFrame</span><span class="p">(</span><span class="n">res</span><span class="p">[</span><span class="s1">'data'</span><span class="p">])</span>
+ <span class="n">df</span><span class="o">.</span><span class="n">columns</span> <span class="o">=</span> <span class="p">[</span><span class="n">c</span><span class="p">[</span><span class="mi">0</span><span class="p">]</span> <span class="k">for</span> <span class="n">c</span> <span class="ow">in</span> <span class="n">res</span><span class="p">[</span><span class="s1">'header'</span><span class="p">]]</span>
+ <span class="k">return</span> <span class="n">df</span></div></div>
+</pre></div>
+
+ </div>
+ </div>
+ <footer>
+
+
+ <hr/>
+
+ <div role="contentinfo">
+ <p>
+ © Copyright 2014, Maxime Beauchemin, Airbnb.
+
+ </p>
+ </div>
+ Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>.
+
+</footer>
+
+ </div>
+ </div>
+
+ </section>
+
+ </div>
+
+
+
+
+
+ <script type="text/javascript">
+ var DOCUMENTATION_OPTIONS = {
+ URL_ROOT:'../',
+ VERSION:'',
+ COLLAPSE_INDEX:false,
+ FILE_SUFFIX:'.html',
+ HAS_SOURCE: true
+ };
+ </script>
+ <script type="text/javascript" src="../_static/jquery.js"></script>
+ <script type="text/javascript" src="../_static/underscore.js"></script>
+ <script type="text/javascript" src="../_static/doctools.js"></script>
+
+
+
+
+
+ <script type="text/javascript" src="../_static/js/theme.js"></script>
+
+
+
+
+ <script type="text/javascript">
+ jQuery(function () {
+ SphinxRtdTheme.StickyNav.enable();
+ });
+ </script>
+
+
+</body>
+</html>
\ No newline at end of file