Posted to commits@airflow.apache.org by cr...@apache.org on 2018/01/03 17:48:09 UTC

[09/35] incubator-airflow-site git commit: 1.9.0

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_modules/slack_operator.html
----------------------------------------------------------------------
diff --git a/_modules/slack_operator.html b/_modules/slack_operator.html
index 152902b..65a1a4d 100644
--- a/_modules/slack_operator.html
+++ b/_modules/slack_operator.html
@@ -13,6 +13,8 @@
 
   
   
+  
+  
 
   
 
@@ -30,6 +32,9 @@
   
 
   
+        <link rel="index" title="Index"
+              href="../genindex.html"/>
+        <link rel="search" title="Search" href="../search.html"/>
     <link rel="top" title="Airflow Documentation" href="../index.html"/>
         <link rel="up" title="Module code" href="index.html"/> 
 
@@ -40,6 +45,7 @@
 
 <body class="wy-body-for-nav" role="document">
 
+   
   <div class="wy-grid-for-nav">
 
     
@@ -76,7 +82,10 @@
           
             
             
-                <ul>
+              
+            
+            
+              <ul>
 <li class="toctree-l1"><a class="reference internal" href="../project.html">Project</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../license.html">License</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../start.html">Quick Start</a></li>
@@ -90,6 +99,8 @@
 <li class="toctree-l1"><a class="reference internal" href="../scheduler.html">Scheduling &amp; Triggers</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../plugins.html">Plugins</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../security.html">Security</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../api.html">Experimental Rest API</a></li>
+<li class="toctree-l1"><a class="reference internal" href="../integration.html">Integration</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../faq.html">FAQ</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../code.html">API Reference</a></li>
 </ul>
@@ -104,8 +115,10 @@
 
       
       <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
-        <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
-        <a href="../index.html">Airflow</a>
+        
+          <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
+          <a href="../index.html">Airflow</a>
+        
       </nav>
 
 
@@ -118,19 +131,36 @@
 
 
 
+
+
+
+
+
+
+
+
+
+
 <div role="navigation" aria-label="breadcrumbs navigation">
+
   <ul class="wy-breadcrumbs">
-    <li><a href="../index.html">Docs</a> &raquo;</li>
-      
+    
+      <li><a href="../index.html">Docs</a> &raquo;</li>
+        
           <li><a href="index.html">Module code</a> &raquo;</li>
-      
-    <li>slack_operator</li>
+        
+      <li>slack_operator</li>
+    
+    
       <li class="wy-breadcrumbs-aside">
         
-          
+            
         
       </li>
+    
   </ul>
+
+  
   <hr/>
 </div>
           <div role="main" class="document" itemscope="itemscope" itemtype="http://schema.org/Article">
@@ -151,12 +181,12 @@
 <span class="c1"># See the License for the specific language governing permissions and</span>
 <span class="c1"># limitations under the License.</span>
 
-<span class="kn">from</span> <span class="nn">slackclient</span> <span class="kn">import</span> <span class="n">SlackClient</span>
-<span class="kn">from</span> <span class="nn">airflow.models</span> <span class="kn">import</span> <span class="n">BaseOperator</span>
-<span class="kn">from</span> <span class="nn">airflow.utils.decorators</span> <span class="kn">import</span> <span class="n">apply_defaults</span>
-<span class="kn">from</span> <span class="nn">airflow.exceptions</span> <span class="kn">import</span> <span class="n">AirflowException</span>
 <span class="kn">import</span> <span class="nn">json</span>
-<span class="kn">import</span> <span class="nn">logging</span>
+
+<span class="kn">from</span> <span class="nn">slackclient</span> <span class="k">import</span> <span class="n">SlackClient</span>
+<span class="kn">from</span> <span class="nn">airflow.models</span> <span class="k">import</span> <span class="n">BaseOperator</span>
+<span class="kn">from</span> <span class="nn">airflow.utils.decorators</span> <span class="k">import</span> <span class="n">apply_defaults</span>
+<span class="kn">from</span> <span class="nn">airflow.exceptions</span> <span class="k">import</span> <span class="n">AirflowException</span>
 
 
 <div class="viewcode-block" id="SlackAPIOperator"><a class="viewcode-back" href="../code.html#airflow.operators.SlackAPIOperator">[docs]</a><span class="k">class</span> <span class="nc">SlackAPIOperator</span><span class="p">(</span><span class="n">BaseOperator</span><span class="p">):</span>
@@ -177,9 +207,9 @@
     <span class="k">def</span> <span class="nf">__init__</span><span class="p">(</span><span class="bp">self</span><span class="p">,</span>
                  <span class="n">token</span><span class="o">=</span><span class="s1">&#39;unset&#39;</span><span class="p">,</span>
                  <span class="n">method</span><span class="o">=</span><span class="s1">&#39;unset&#39;</span><span class="p">,</span>
-                 <span class="n">api_params</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span>
+                 <span class="n">api_params</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
                  <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
-        <span class="nb">super</span><span class="p">(</span><span class="n">SlackAPIOperator</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">__init__</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
+        <span class="nb">super</span><span class="p">(</span><span class="n">SlackAPIOperator</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">token</span> <span class="o">=</span> <span class="n">token</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="n">method</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">api_params</span> <span class="o">=</span> <span class="n">api_params</span>
@@ -205,8 +235,9 @@
         <span class="n">sc</span> <span class="o">=</span> <span class="n">SlackClient</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">token</span><span class="p">)</span>
         <span class="n">rc</span> <span class="o">=</span> <span class="n">sc</span><span class="o">.</span><span class="n">api_call</span><span class="p">(</span><span class="bp">self</span><span class="o">.</span><span class="n">method</span><span class="p">,</span> <span class="o">**</span><span class="bp">self</span><span class="o">.</span><span class="n">api_params</span><span class="p">)</span>
         <span class="k">if</span> <span class="ow">not</span> <span class="n">rc</span><span class="p">[</span><span class="s1">&#39;ok&#39;</span><span class="p">]:</span>
-            <span class="n">logging</span><span class="o">.</span><span class="n">error</span><span class="p">(</span><span class="s2">&quot;Slack API call failed ({})&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">rc</span><span class="p">[</span><span class="s1">&#39;error&#39;</span><span class="p">]))</span>
-            <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="s2">&quot;Slack API call failed: ({})&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">rc</span><span class="p">[</span><span class="s1">&#39;error&#39;</span><span class="p">]))</span></div></div>
+            <span class="n">msg</span> <span class="o">=</span> <span class="s2">&quot;Slack API call failed (</span><span class="si">%s</span><span class="s2">)&quot;</span><span class="o">.</span><span class="n">format</span><span class="p">(</span><span class="n">rc</span><span class="p">[</span><span class="s1">&#39;error&#39;</span><span class="p">])</span>
+            <span class="bp">self</span><span class="o">.</span><span class="n">log</span><span class="o">.</span><span class="n">error</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span>
+            <span class="k">raise</span> <span class="n">AirflowException</span><span class="p">(</span><span class="n">msg</span><span class="p">)</span></div></div>
 
 
 <div class="viewcode-block" id="SlackAPIPostOperator"><a class="viewcode-back" href="../code.html#airflow.operators.SlackAPIPostOperator">[docs]</a><span class="k">class</span> <span class="nc">SlackAPIPostOperator</span><span class="p">(</span><span class="n">SlackAPIOperator</span><span class="p">):</span>
@@ -225,7 +256,7 @@
 <span class="sd">    :type attachments: array of hashes</span>
 <span class="sd">    &quot;&quot;&quot;</span>
 
-    <span class="n">template_fields</span> <span class="o">=</span> <span class="p">(</span><span class="s1">&#39;username&#39;</span><span class="p">,</span> <span class="s1">&#39;text&#39;</span><span class="p">,</span> <span class="s1">&#39;attachments&#39;</span><span class="p">)</span>
+    <span class="n">template_fields</span> <span class="o">=</span> <span class="p">(</span><span class="s1">&#39;username&#39;</span><span class="p">,</span> <span class="s1">&#39;text&#39;</span><span class="p">,</span> <span class="s1">&#39;attachments&#39;</span><span class="p">,</span> <span class="s1">&#39;channel&#39;</span><span class="p">)</span>
     <span class="n">ui_color</span> <span class="o">=</span> <span class="s1">&#39;#FFBA40&#39;</span>
 
     <span class="nd">@apply_defaults</span>
@@ -236,7 +267,7 @@
                       <span class="s1">&#39;Here is a cat video instead</span><span class="se">\n</span><span class="s1">&#39;</span>
                       <span class="s1">&#39;https://www.youtube.com/watch?v=J---aiyznGQ&#39;</span><span class="p">,</span>
                  <span class="n">icon_url</span><span class="o">=</span><span class="s1">&#39;https://raw.githubusercontent.com/airbnb/airflow/master/airflow/www/static/pin_100.png&#39;</span><span class="p">,</span>
-                 <span class="n">attachments</span><span class="o">=</span><span class="bp">None</span><span class="p">,</span>
+                 <span class="n">attachments</span><span class="o">=</span><span class="kc">None</span><span class="p">,</span>
                  <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">):</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">method</span> <span class="o">=</span> <span class="s1">&#39;chat.postMessage&#39;</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">channel</span> <span class="o">=</span> <span class="n">channel</span>
@@ -244,7 +275,7 @@
         <span class="bp">self</span><span class="o">.</span><span class="n">text</span> <span class="o">=</span> <span class="n">text</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">icon_url</span> <span class="o">=</span> <span class="n">icon_url</span>
         <span class="bp">self</span><span class="o">.</span><span class="n">attachments</span> <span class="o">=</span> <span class="n">attachments</span>
-        <span class="nb">super</span><span class="p">(</span><span class="n">SlackAPIPostOperator</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="n">__init__</span><span class="p">(</span><span class="n">method</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">method</span><span class="p">,</span>
+        <span class="nb">super</span><span class="p">(</span><span class="n">SlackAPIPostOperator</span><span class="p">,</span> <span class="bp">self</span><span class="p">)</span><span class="o">.</span><span class="fm">__init__</span><span class="p">(</span><span class="n">method</span><span class="o">=</span><span class="bp">self</span><span class="o">.</span><span class="n">method</span><span class="p">,</span>
                                                    <span class="o">*</span><span class="n">args</span><span class="p">,</span> <span class="o">**</span><span class="n">kwargs</span><span class="p">)</span>
 
     <span class="k">def</span> <span class="nf">construct_api_call_params</span><span class="p">(</span><span class="bp">self</span><span class="p">):</span>
@@ -258,6 +289,9 @@
 </pre></div>
 
            </div>
+           <div class="articleComments">
+            
+           </div>
           </div>
           <footer>
   
@@ -290,7 +324,8 @@
             VERSION:'',
             COLLAPSE_INDEX:false,
             FILE_SUFFIX:'.html',
-            HAS_SOURCE:  true
+            HAS_SOURCE:  true,
+            SOURCELINK_SUFFIX: '.txt'
         };
     </script>
       <script type="text/javascript" src="../_static/jquery.js"></script>

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_modules/sqlite_hook.html
----------------------------------------------------------------------
diff --git a/_modules/sqlite_hook.html b/_modules/sqlite_hook.html
index dc33f36..d64dd1c 100644
--- a/_modules/sqlite_hook.html
+++ b/_modules/sqlite_hook.html
@@ -13,6 +13,8 @@
 
   
   
+  
+  
 
   
 
@@ -80,7 +82,10 @@
           
             
             
-                <ul>
+              
+            
+            
+              <ul>
 <li class="toctree-l1"><a class="reference internal" href="../project.html">Project</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../license.html">License</a></li>
 <li class="toctree-l1"><a class="reference internal" href="../start.html">Quick Start</a></li>

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/api.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/api.rst.txt b/_sources/api.rst.txt
index eef671c..856ec9e 100644
--- a/_sources/api.rst.txt
+++ b/_sources/api.rst.txt
@@ -28,16 +28,26 @@ configure as follows:
 Authentication
 --------------
 
-Only Kerberos authentication is currently supported for the API. To enable this set the following
-in the configuration:
+Authentication for the API is handled separately from the Web Authentication. The default is not to
+require any authentication on the API -- i.e. it is wide open by default. This is not recommended if
+your Airflow webserver is publicly accessible; in that case you should probably use the deny-all backend:
 
-.. code-block:: bash
+.. code-block:: ini
+
+    [api]
+    auth_backend = airflow.api.auth.backend.deny_all
+
+
+Kerberos is the only "real" authentication mechanism currently supported for the API. To enable
+this set the following in the configuration:
+
+.. code-block:: ini
 
     [api]
-    auth_backend = airflow.api.auth.backend.default
+    auth_backend = airflow.api.auth.backend.kerberos_auth
 
     [kerberos]
     keytab = <KEYTAB>
 
-The Kerberos service is configured as `airflow/fully.qualified.domainname@REALM`. Make sure this
+The Kerberos service is configured as ``airflow/fully.qualified.domainname@REALM``. Make sure this
 principal exists in the keytab file.
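
As a client-side illustration of the Kerberos-protected API, a request could be authenticated with a negotiated ticket. This is a hedged sketch: the ``requests-kerberos`` dependency, the ``service='airflow'`` keyword and the ``/api/experimental/test`` endpoint path are assumptions, not something this page documents.

    # pip install requests requests-kerberos   (assumed client-side dependencies)
    import requests
    from requests_kerberos import HTTPKerberosAuth

    resp = requests.get(
        'http://localhost:8080/api/experimental/test',  # assumed health-check endpoint
        auth=HTTPKerberosAuth(service='airflow'))       # assumed; matches the airflow/... principal
    print(resp.status_code, resp.text)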

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/code.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/code.rst.txt b/_sources/code.rst.txt
index fabe6db..1369b32 100644
--- a/_sources/code.rst.txt
+++ b/_sources/code.rst.txt
@@ -72,6 +72,7 @@ Operator API
         PrestoIntervalCheckOperator,
         PrestoValueCheckOperator,
         PythonOperator,
+        PythonVirtualenvOperator,
         S3KeySensor,
         S3ToHiveTransfer,
         ShortCircuitOperator,
@@ -91,13 +92,15 @@ Community-contributed Operators
 .. automodule:: airflow.contrib.operators
     :show-inheritance:
     :members:
-        SSHExecuteOperator,
+        SSHOperator,
         VerticaOperator,
         VerticaToHiveTransfer
 
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator
 .. autoclass:: airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator
+.. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator
 .. autoclass:: airflow.contrib.operators.ecs_operator.ECSOperator
+.. autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator
 .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator
 .. autoclass:: airflow.contrib.operators.QuboleOperator
 .. autoclass:: airflow.contrib.operators.hipchat_operator.HipChatAPIOperator
@@ -143,13 +146,13 @@ Variable                            Description
                                     key within the JSON object
 ``{{ task_instance_key_str }}``     a unique, human-readable key to the task instance
                                     formatted ``{dag_id}_{task_id}_{ds}``
-``conf``                            the full configuration object located at
+``{{ conf }}``                      the full configuration object located at
                                     ``airflow.configuration.conf`` which
                                     represents the content of your
                                     ``airflow.cfg``
-``run_id``                          the ``run_id`` of the current DAG run
-``dag_run``                         a reference to the DagRun object
-``test_mode``                       whether the task instance was called using
+``{{ run_id }}``                    the ``run_id`` of the current DAG run
+``{{ dag_run }}``                   a reference to the DagRun object
+``{{ test_mode }}``                 whether the task instance was called using
                                     the CLI's test subcommand
 =================================   ====================================
 
@@ -213,6 +216,7 @@ Hooks
     :show-inheritance:
     :members:
         DbApiHook,
+        DockerHook,
         HiveCliHook,
         HiveMetastoreHook,
         HiveServer2Hook,

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/concepts.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/concepts.rst.txt b/_sources/concepts.rst.txt
index 9f65256..56a9bdb 100644
--- a/_sources/concepts.rst.txt
+++ b/_sources/concepts.rst.txt
@@ -110,7 +110,7 @@ Airflow provides operators for many common tasks, including:
 - ``PythonOperator`` - calls an arbitrary Python function
 - ``EmailOperator`` - sends an email
 - ``HTTPOperator`` - sends an HTTP request
-- ``SqlOperator`` - executes a SQL command
+- ``MySqlOperator``, ``SqliteOperator``, ``PostgresOperator``, ``MsSqlOperator``, ``OracleOperator``, ``JdbcOperator``, etc. - execute a SQL command
 - ``Sensor`` - waits for a certain time, file, database row, S3 key, etc...
 
 
@@ -207,8 +207,7 @@ We can put this all together to build a simple pipeline:
 
     with DAG('my_dag', start_date=datetime(2016, 1, 1)) as dag:
         (
-            dag
-            >> DummyOperator(task_id='dummy_1')
+            DummyOperator(task_id='dummy_1')
             >> BashOperator(
                 task_id='bash_1',
                 bash_command='echo "HELLO!"')
@@ -756,6 +755,8 @@ to the related tasks in Airflow.
 This content will get rendered as markdown respectively in the "Graph View" and
 "Task Details" pages.
 
+.. _jinja-templating:
+
 Jinja Templating
 ================
 
@@ -781,7 +782,8 @@ Here, ``{{ ds }}`` is a macro, and because the ``env`` parameter of the
 as an environment variable named ``EXECUTION_DATE`` in your Bash script.
 
 You can use Jinja templating with every parameter that is marked as "templated"
-in the documentation.
+in the documentation. Template substitution occurs just before the ``pre_execute``
+method of your operator is called.
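
To make this concrete, a minimal sketch of a templated ``env`` parameter (the ``dag`` object is assumed to exist):

    from airflow.operators.bash_operator import BashOperator

    templated_task = BashOperator(
        task_id='print_execution_date',
        bash_command='echo "execution date is $EXECUTION_DATE"',
        env={'EXECUTION_DATE': '{{ ds }}'},  # rendered just before pre_execute runs
        dag=dag)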
 
 Packaged dags
 '''''''''''''

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/configuration.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/configuration.rst.txt b/_sources/configuration.rst.txt
index c4a3442..e68a341 100644
--- a/_sources/configuration.rst.txt
+++ b/_sources/configuration.rst.txt
@@ -83,6 +83,31 @@ within the metadata database. The ``crypto`` package is highly recommended
 during installation. The ``crypto`` package does require that your operating
 system have libffi-dev installed.
 
+If the ``crypto`` package was not installed initially, you can still enable encryption for
+connections by following the steps below:
+
+1. Install the crypto package: ``pip install apache-airflow[crypto]``
+2. Generate a fernet_key using the code snippet below; the fernet_key must be a base64-encoded 32-byte key (a short validation sketch follows this list).
+
+.. code:: python
+
+    from cryptography.fernet import Fernet
+    fernet_key = Fernet.generate_key()
+    print(fernet_key)  # your fernet_key, keep it in a secure place!
+
+3. Replace the ``airflow.cfg`` fernet_key value with the one from step 2.
+Alternatively, you can store your fernet_key in an OS environment variable. You
+do not need to change ``airflow.cfg`` in this case as Airflow will use the environment
+variable over the value in ``airflow.cfg``:
+
+.. code-block:: bash
+
+  # Note the double underscores
+  export AIRFLOW__CORE__FERNET_KEY=your_fernet_key
+
+4. Restart the Airflow webserver.
+5. For existing connections (the ones that you had defined before installing ``airflow[crypto]`` and creating a Fernet key), you need to open each connection in the connection admin UI, re-type the password, and save it.
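
As a quick check for step 2, constructing a ``Fernet`` object validates the key; this is an editorial sketch, where ``fernet_key`` is the value generated above:

    from cryptography.fernet import Fernet

    Fernet(fernet_key)  # raises ValueError if the key is not a base64-encoded 32-byte key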
+
 Connections in Airflow pipelines can be created using environment variables.
 The environment variable needs to have a prefix of ``AIRFLOW_CONN_`` for
 Airflow with the value in a URI format to use the connection properly. Please
@@ -131,6 +156,41 @@ to monitor your workers. You can use the shortcut command ``airflow flower``
 to start a Flower web server.
 
 
+Scaling Out with Dask
+'''''''''''''''''''''
+
+``DaskExecutor`` allows you to run Airflow tasks in a Dask Distributed cluster.
+
+Dask clusters can be run on a single machine or on remote networks. For complete
+details, consult the `Distributed documentation <https://distributed.readthedocs.io/>`_.
+
+To create a cluster, first start a Scheduler:
+
+.. code-block:: bash
+
+    # default settings for a local cluster
+    DASK_HOST=127.0.0.1
+    DASK_PORT=8786
+
+    dask-scheduler --host $DASK_HOST --port $DASK_PORT
+
+Next start at least one Worker on any machine that can connect to the host:
+
+.. code-block:: bash
+
+    dask-worker $DASK_HOST:$DASK_PORT
+
+Edit your ``airflow.cfg`` to set your executor to ``DaskExecutor`` and provide
+the Dask Scheduler address in the ``[dask]`` section (see the sketch after the notes below).
+
+Please note:
+
+- Each Dask worker must be able to import Airflow and any dependencies you
+  require.
+- Dask does not support queues. If an Airflow task was created with a queue, a
+  warning will be raised but the task will be submitted to the cluster.
+
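
For illustration, the relevant ``airflow.cfg`` entries might look as follows; the ``cluster_address`` option name is an assumption here, so check the ``[dask]`` section of the generated default config:

    [core]
    executor = DaskExecutor

    [dask]
    # address of the dask-scheduler started above (assumed option name)
    cluster_address = 127.0.0.1:8786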
+
 Logs
 ''''
 Users can specify a logs folder in ``airflow.cfg``. By default, it is in
@@ -159,7 +219,8 @@ try to use ``S3Hook('MyS3Conn')``.
 In the Airflow Web UI, local logs take precedence over remote logs. If local logs
 can not be found or accessed, the remote logs will be displayed. Note that logs
 are only sent to remote storage once a task completes (including failure). In other
-words, remote logs for running tasks are unavailable.
+words, remote logs for running tasks are unavailable. Logs are stored in the log
+folder as ``{dag_id}/{task_id}/{execution_date}/{try_number}.log``.
 
 Scaling Out on Mesos (community contributed)
 ''''''''''''''''''''''''''''''''''''''''''''

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/faq.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/faq.rst.txt b/_sources/faq.rst.txt
index 1e4c038..04cf346 100644
--- a/_sources/faq.rst.txt
+++ b/_sources/faq.rst.txt
@@ -24,7 +24,7 @@ Here are some of the common causes:
   do not override their parent DAG's ``schedule_interval``.
 
 - Is your ``start_date`` beyond where you can see it in the UI? If you
-  set your it to some time say 3 months ago, you won't be able to see
+  set your ``start_date`` to some time, say 3 months ago, you won't be able to see
   it in the main view in the UI, but you should be able to see it in the
   ``Menu -> Browse ->Task Instances``.
 
@@ -66,8 +66,8 @@ documentation
 Why are connection passwords still not encrypted in the metadata db after I installed airflow[crypto]?
 ------------------------------------------------------------------------------------------------------
 
-- Verify that the ``fernet_key`` defined in ``$AIRFLOW_HOME/airflow.cfg`` is a valid Fernet key. It must be a base64-encoded 32-byte key. You need to restart the webserver after you update the key
-- For existing connections (the ones that you had defined before installing ``airflow[crypto]`` and creating a Fernet key), you need to open each connection in the connection admin UI, re-type the password, and save it
+Check out the ``Connections`` section in the Configuration part of the
+documentation.
 
 What's the deal with ``start_date``?
 ------------------------------------
@@ -80,7 +80,7 @@ task. From that point on, the scheduler creates new DagRuns based on
 your ``schedule_interval`` and the corresponding task instances run as your
 dependencies are met. When introducing new tasks to your DAG, you need to
 pay special attention to ``start_date``, and may want to reactivate
-inactive DagRuns to get the new task to get onboarded properly.
+inactive DagRuns to get the new task onboarded properly.
 
 We recommend against using dynamic values as ``start_date``, especially
 ``datetime.now()`` as it can be quite confusing. The task is triggered
@@ -103,7 +103,7 @@ it enforces this idea of rounded schedules.
 
 When using ``depends_on_past=True`` it's important to pay special attention
 to ``start_date`` as the past dependency is not enforced only on the specific
-schedule of the ``start_date`` specified for the task. It' also
+schedule of the ``start_date`` specified for the task. It's also
 important to watch DagRun activity status in time when introducing
 new ``depends_on_past=True``, unless you are planning on running a backfill
 for the new task(s).
@@ -111,7 +111,7 @@ for the new task(s).
 Also important to note is that the tasks ``start_date``, in the context of a
 backfill CLI command, get overridden by the backfill's command ``start_date``.
 This allows for a backfill on tasks that have ``depends_on_past=True`` to
-actually start, if it wasn't the case, the backfill just wouldn't start.
+actually start, if that wasn't the case, the backfill just wouldn't start.
 
 How can I create DAGs dynamically?
 ----------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/installation.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/installation.rst.txt b/_sources/installation.rst.txt
index 289f64f..b4fb126 100644
--- a/_sources/installation.rst.txt
+++ b/_sources/installation.rst.txt
@@ -8,18 +8,18 @@ The easiest way to install the latest stable version of Airflow is with ``pip``:
 
 .. code-block:: bash
 
-    pip install airflow
+    pip install apache-airflow
 
 You can also install Airflow with support for extra features like ``s3`` or ``postgres``:
 
 .. code-block:: bash
 
-    pip install "airflow[s3, postgres]"
+    pip install "apache-airflow[s3, postgres]"
 
 Extra Packages
 ''''''''''''''
 
-The ``airflow`` PyPI basic package only installs what's needed to get started.
+The ``apache-airflow`` PyPI basic package only installs what's needed to get started.
 Subpackages can be installed depending on what will be useful in your
 environment. For instance, if you don't need connectivity with Postgres,
 you won't have to go through the trouble of installing the ``postgres-devel``
@@ -30,61 +30,63 @@ these extra dependencies.
 
 Here's the list of the subpackages and what they enable:
 
-+---------------+-------------------------------------+-------------------------------------------------+
-| subpackage    |     install command                 | enables                                         |
-+===============+=====================================+=================================================+
-|  all          | ``pip install airflow[all]``        | All Airflow features known to man               |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  all_dbs      | ``pip install airflow[all_dbs]``    | All databases integrations                      |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  async        | ``pip install airflow[async]``      | Async worker classes for gunicorn               |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  devel        | ``pip install airflow[devel]``      | Minimum dev tools requirements                  |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  devel_hadoop |``pip install airflow[devel_hadoop]``| Airflow + dependencies on the Hadoop stack      |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  celery       | ``pip install airflow[celery]``     | CeleryExecutor                                  |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  crypto       | ``pip install airflow[crypto]``     | Encrypt connection passwords in metadata db     |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  druid        | ``pip install airflow[druid]``      | Druid.io related operators & hooks              |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  gcp_api      | ``pip install airflow[gcp_api]``    | Google Cloud Platform hooks and operators       |
-|               |                                     | (using ``google-api-python-client``)            |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  jdbc         | ``pip install airflow[jdbc]``       | JDBC hooks and operators                        |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  hdfs         | ``pip install airflow[hdfs]``       | HDFS hooks and operators                        |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  hive         | ``pip install airflow[hive]``       | All Hive related operators                      |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  kerberos     | ``pip install airflow[kerberos]``   | kerberos integration for kerberized hadoop      |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  ldap         | ``pip install airflow[ldap]``       | ldap authentication for users                   |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  mssql        | ``pip install airflow[mssql]``      | Microsoft SQL operators and hook,               |
-|               |                                     | support as an Airflow backend                   |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  mysql        | ``pip install airflow[mysql]``      | MySQL operators and hook, support as            |
-|               |                                     | an Airflow backend                              |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  password     | ``pip install airflow[password]``   | Password Authentication for users               |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  postgres     | ``pip install airflow[postgres]``   | Postgres operators and hook, support            |
-|               |                                     | as an Airflow backend                           |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  qds          | ``pip install airflow[qds]``        | Enable QDS (qubole data services) support       |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  rabbitmq     | ``pip install airflow[rabbitmq]``   | Rabbitmq support as a Celery backend            |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  s3           | ``pip install airflow[s3]``         | ``S3KeySensor``, ``S3PrefixSensor``             |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  samba        | ``pip install airflow[samba]``      | ``Hive2SambaOperator``                          |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  slack        | ``pip install airflow[slack]``      | ``SlackAPIPostOperator``                        |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  vertica      | ``pip install airflow[vertica]``    | Vertica hook                                    |
-|               |                                     | support as an Airflow backend                   |
-+---------------+-------------------------------------+-------------------------------------------------+
-|  cloudant     | ``pip install airflow[cloudant]``   | Cloudant hook                                   |
-+---------------+-------------------------------------+-------------------------------------------------+
++---------------+----------------------------------------------+-------------------------------------------------+
+| subpackage    | install command                              | enables                                         |
++===============+==============================================+=================================================+
+|  all          | ``pip install apache-airflow[all]``          | All Airflow features known to man               |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  all_dbs      | ``pip install apache-airflow[all_dbs]``      | All databases integrations                      |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  async        | ``pip install apache-airflow[async]``        | Async worker classes for gunicorn               |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  devel        | ``pip install apache-airflow[devel]``        | Minimum dev tools requirements                  |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  devel_hadoop | ``pip install apache-airflow[devel_hadoop]`` | Airflow + dependencies on the Hadoop stack      |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  celery       | ``pip install apache-airflow[celery]``       | CeleryExecutor                                  |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  crypto       | ``pip install apache-airflow[crypto]``       | Encrypt connection passwords in metadata db     |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  druid        | ``pip install apache-airflow[druid]``        | Druid.io related operators & hooks              |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  gcp_api      | ``pip install apache-airflow[gcp_api]``      | Google Cloud Platform hooks and operators       |
+|               |                                              | (using ``google-api-python-client``)            |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  jdbc         | ``pip install apache-airflow[jdbc]``         | JDBC hooks and operators                        |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  hdfs         | ``pip install apache-airflow[hdfs]``         | HDFS hooks and operators                        |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  hive         | ``pip install apache-airflow[hive]``         | All Hive related operators                      |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  kerberos     | ``pip install apache-airflow[kerberos]``     | kerberos integration for kerberized hadoop      |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  ldap         | ``pip install apache-airflow[ldap]``         | ldap authentication for users                   |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  mssql        | ``pip install apache-airflow[mssql]``        | Microsoft SQL operators and hook,               |
+|               |                                              | support as an Airflow backend                   |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  mysql        | ``pip install apache-airflow[mysql]``        | MySQL operators and hook, support as            |
+|               |                                              | an Airflow backend                              |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  password     | ``pip install apache-airflow[password]``     | Password Authentication for users               |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  postgres     | ``pip install apache-airflow[postgres]``     | Postgres operators and hook, support            |
+|               |                                              | as an Airflow backend                           |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  qds          | ``pip install apache-airflow[qds]``          | Enable QDS (qubole data services) support       |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  rabbitmq     | ``pip install apache-airflow[rabbitmq]``     | Rabbitmq support as a Celery backend            |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  s3           | ``pip install apache-airflow[s3]``           | ``S3KeySensor``, ``S3PrefixSensor``             |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  samba        | ``pip install apache-airflow[samba]``        | ``Hive2SambaOperator``                          |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  slack        | ``pip install apache-airflow[slack]``        | ``SlackAPIPostOperator``                        |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  vertica      | ``pip install apache-airflow[vertica]``      | Vertica hook                                    |
+|               |                                              | support as an Airflow backend                   |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  cloudant     | ``pip install apache-airflow[cloudant]``     | Cloudant hook                                   |
++---------------+----------------------------------------------+-------------------------------------------------+
+|  redis        | ``pip install apache-airflow[redis]``        | Redis hooks and sensors                         |
++---------------+----------------------------------------------+-------------------------------------------------+

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/integration.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/integration.rst.txt b/_sources/integration.rst.txt
index 10bc038..4887486 100644
--- a/_sources/integration.rst.txt
+++ b/_sources/integration.rst.txt
@@ -1,15 +1,179 @@
 Integration
 ===========
 
+- :ref:`Azure`
 - :ref:`AWS`
+- :ref:`Databricks`
 - :ref:`GCP`
 
+
+.. _Azure:
+
+Azure: Microsoft Azure
+----------------------
+
+Airflow has limited support for Microsoft Azure: interfaces exist only for Azure Blob
+Storage. Note that the Hook, Sensor and Operator are in the contrib section.
+
+Azure Blob Storage
+''''''''''''''''''
+
+All classes communicate via the Windows Azure Storage Blob protocol. Make sure that an
+Airflow connection of type ``wasb`` exists. Authorization can be done by supplying a
+login (= the storage account name) and password (= the storage account key), or a login and SAS token in the extra
+field (see the connection ``wasb_default`` for an example).
+
+- :ref:`WasbBlobSensor`: Checks if a blob is present on Azure Blob storage.
+- :ref:`WasbPrefixSensor`: Checks if blobs matching a prefix are present on Azure Blob storage.
+- :ref:`FileToWasbOperator`: Uploads a local file to a container as a blob.
+- :ref:`WasbHook`: Interface with Azure Blob Storage.
+
+.. _WasbBlobSensor:
+
+WasbBlobSensor
+"""""""""""""""
+
+.. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbBlobSensor
+
+.. _WasbPrefixSensor:
+
+WasbPrefixSensor
+"""""""""""""""""
+
+.. autoclass:: airflow.contrib.sensors.wasb_sensor.WasbPrefixSensor
+
+.. _FileToWasbOperator:
+
+FileToWasbOperator
+"""""""""""""""""""
+
+.. autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator
+
+.. _WasbHook:
+
+WasbHook
+"""""""""
+
+.. autoclass:: airflow.contrib.hooks.wasb_hook.WasbHook
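
As a usage illustration, a blob sensor might be wired up as below; the parameter names are assumptions based on the contrib module and should be verified against the class documentation above, and the ``dag`` object is assumed to exist:

    from airflow.contrib.sensors.wasb_sensor import WasbBlobSensor

    wait_for_blob = WasbBlobSensor(
        task_id='wait_for_blob',
        container_name='my-container',         # assumed parameter name
        blob_name='data/2018-01-01/part.csv',  # assumed parameter name
        wasb_conn_id='wasb_default',
        dag=dag)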
+
+
+
 .. _AWS:
 
-AWS: Amazon Webservices
+AWS: Amazon Web Services
 -----------------------
 
----
+Airflow has extensive support for Amazon Web Services. Note, however, that the Hooks, Sensors and
+Operators are in the contrib section.
+
+AWS EMR
+''''''''
+
+- :ref:`EmrAddStepsOperator` : Adds steps to an existing EMR JobFlow.
+- :ref:`EmrCreateJobFlowOperator` : Creates an EMR JobFlow, reading the config from the EMR connection.
+- :ref:`EmrTerminateJobFlowOperator` : Terminates an EMR JobFlow.
+- :ref:`EmrHook` : Interact with AWS EMR.
+
+.. _EmrAddStepsOperator:
+
+EmrAddStepsOperator
+""""""""
+
+.. autoclass:: airflow.contrib.operators.emr_add_steps_operator.EmrAddStepsOperator
+
+.. _EmrCreateJobFlowOperator:
+
+EmrCreateJobFlowOperator
+""""""""
+
+.. autoclass:: airflow.contrib.operators.emr_create_job_flow_operator.EmrCreateJobFlowOperator
+
+.. _EmrTerminateJobFlowOperator:
+
+EmrTerminateJobFlowOperator
+""""""""
+
+.. autoclass:: airflow.contrib.operators.emr_terminate_job_flow_operator.EmrTerminateJobFlowOperator
+
+.. _EmrHook:
+
+EmrHook
+""""""""
+
+.. autoclass:: airflow.contrib.hooks.emr_hook.EmrHook
+
+
+AWS S3
+'''''''
+
+- :ref:`S3FileTransformOperator` : Copies data from a source S3 location to a temporary location on the local filesystem.
+- :ref:`S3ToHiveTransfer` : Moves data from S3 to Hive. The operator downloads a file from S3, stores the file locally before loading it into a Hive table.
+- :ref:`S3Hook` : Interact with AWS S3.
+
+.. _S3FileTransformOperator:
+
+S3FileTransformOperator
+""""""""""""""""""""""""
+
+.. autoclass:: airflow.operators.s3_file_transform_operator.S3FileTransformOperator
+
+.. _S3ToHiveTransfer:
+
+S3ToHiveTransfer
+"""""""""""""""""
+
+.. autoclass:: airflow.operators.s3_to_hive_operator.S3ToHiveTransfer
+
+.. _S3Hook:
+
+S3Hook
+"""""""
+
+.. autoclass:: airflow.hooks.S3_hook.S3Hook
+
+
+AWS EC2 Container Service
+''''''''''''''''''''''''''
+
+- :ref:`ECSOperator` : Execute a task on AWS EC2 Container Service.
+
+.. _ECSOperator:
+
+ECSOperator
+""""""""""""
+
+.. autoclass:: airflow.contrib.operators.ecs_operator.ECSOperator
+
+
+AWS RedShift
+'''''''''''''
+
+- :ref:`RedshiftToS3Transfer` : Executes an unload command to S3 as a CSV with headers.
+
+.. _RedshiftToS3Transfer:
+
+RedshiftToS3Transfer
+"""""""""""""""""""""
+
+.. autoclass:: airflow.operators.redshift_to_s3_operator.RedshiftToS3Transfer
+
+
+
+.. _Databricks:
+
+Databricks
+----------
+
+`Databricks <https://databricks.com/>`_ has contributed an Airflow operator which enables
+submitting runs to the Databricks platform. Internally the operator talks to the
+``api/2.0/jobs/runs/submit`` `endpoint <https://docs.databricks.com/api/latest/jobs.html#runs-submit>`_.
+
+DatabricksSubmitRunOperator
+'''''''''''''''''''''''''''
+
+.. autoclass:: airflow.contrib.operators.databricks_operator.DatabricksSubmitRunOperator
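
A hedged usage sketch: the ``json`` payload below mirrors the ``runs/submit`` request body linked above, the cluster values and notebook path are placeholders, and the ``dag`` object is assumed to exist:

    from airflow.contrib.operators.databricks_operator import DatabricksSubmitRunOperator

    notebook_run = DatabricksSubmitRunOperator(
        task_id='notebook_run',
        json={
            'new_cluster': {
                'spark_version': '2.1.0-db3-scala2.11',  # placeholder
                'node_type_id': 'r3.xlarge',             # placeholder
                'num_workers': 1,
            },
            'notebook_task': {
                'notebook_path': '/Users/someone@example.com/MyNotebook',  # placeholder
            },
        },
        dag=dag)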
+
+
 
 .. _GCP:
 
@@ -20,11 +184,81 @@ Airflow has extensive support for the Google Cloud Platform. But note that most
 Operators are in the contrib section. Meaning that they have a *beta* status, meaning that
 they can have breaking changes between minor releases.
 
+Logging
+'''''''
+
+Airflow can be configured to read and write task logs in Google cloud storage.
+Follow the steps below to enable Google cloud storage logging.
+
+#. Airflow's logging system requires a custom .py file to be located in the ``PYTHONPATH``, so that it's importable from Airflow. Start by creating a directory to store the config file. ``$AIRFLOW_HOME/config`` is recommended.
+#. Create empty files called ``$AIRFLOW_HOME/config/log_config.py`` and ``$AIRFLOW_HOME/config/__init__.py``.
+#. Copy the contents of ``airflow/config_templates/airflow_local_settings.py`` into the ``log_config.py`` file that was just created in the step above.
+#. Customize the following portions of the template:
+
+    .. code-block:: bash
+
+        # Add this variable to the top of the file. Note the trailing slash.
+        GCS_LOG_FOLDER = 'gs://<bucket where logs should be persisted>/'
+
+        # Rename DEFAULT_LOGGING_CONFIG to LOGGING_CONFIG
+        LOGGING_CONFIG = ...
+
+        # Add a GCSTaskHandler to the 'handlers' block of the LOGGING_CONFIG variable
+        'gcs.task': {
+            'class': 'airflow.utils.log.gcs_task_handler.GCSTaskHandler',
+            'formatter': 'airflow.task',
+            'base_log_folder': os.path.expanduser(BASE_LOG_FOLDER),
+            'gcs_log_folder': GCS_LOG_FOLDER,
+            'filename_template': FILENAME_TEMPLATE,
+        },
+
+        # Update the airflow.task and airflow.task_runner blocks to be 'gcs.task' instead of 'file.task'.
+        'loggers': {
+            'airflow.task': {
+                'handlers': ['gcs.task'],
+                ...
+            },
+            'airflow.task_runner': {
+                'handlers': ['gcs.task'],
+                ...
+            },
+            'airflow': {
+                'handlers': ['console'],
+                ...
+            },
+        }
+
+#. Make sure a Google cloud platform connection hook has been defined in Airflow. The hook should have read and write access to the Google cloud storage bucket defined above in ``GCS_LOG_FOLDER``.
+
+#. Update ``$AIRFLOW_HOME/airflow.cfg`` to contain:
+
+    .. code-block:: bash
+
+        task_log_reader = gcs.task
+        logging_config_class = log_config.LOGGING_CONFIG
+        remote_log_conn_id = <name of the Google cloud platform hook>
+
+#. Restart the Airflow webserver and scheduler, and trigger (or wait for) a new task execution.
+#. Verify that logs are showing up for newly executed tasks in the bucket you've defined.
+#. Verify that the Google cloud storage viewer is working in the UI. Pull up a newly executed task, and verify that you see something like:
+
+    .. code-block:: bash
+
+        *** Reading remote log from gs://<bucket where logs should be persisted>/example_bash_operator/run_this_last/2017-10-03T00:00:00/16.log.
+        [2017-10-03 21:57:50,056] {cli.py:377} INFO - Running on host chrisr-00532
+        [2017-10-03 21:57:50,093] {base_task_runner.py:115} INFO - Running: ['bash', '-c', u'airflow run example_bash_operator run_this_last 2017-10-03T00:00:00 --job_id 47 --raw -sd DAGS_FOLDER/example_dags/example_bash_operator.py']
+        [2017-10-03 21:57:51,264] {base_task_runner.py:98} INFO - Subtask: [2017-10-03 21:57:51,263] {__init__.py:45} INFO - Using executor SequentialExecutor
+        [2017-10-03 21:57:51,306] {base_task_runner.py:98} INFO - Subtask: [2017-10-03 21:57:51,306] {models.py:186} INFO - Filling up the DagBag from /airflow/dags/example_dags/example_bash_operator.py
+
+Note the top line that says it's reading from the remote log file.
+
+Please be aware that if you were persisting logs to Google cloud storage using the old-style airflow.cfg configuration method, the old logs will no longer be visible in the Airflow UI, though they'll still exist in Google cloud storage. This is a backwards incompatible change. If you are unhappy with it, you can change the ``FILENAME_TEMPLATE`` to reflect the old-style log filename format.
+
 BigQuery
 ''''''''
 
 BigQuery Operators
-^^^^^^^^^^^^^^^^^^
+""""""""""""""""""
 
 - :ref:`BigQueryCheckOperator` : Performs checks against a SQL query that will return a single row with different values.
 - :ref:`BigQueryValueCheckOperator` : Performs a simple value check using SQL code.
@@ -37,48 +271,48 @@ BigQuery Operators
 .. _BigQueryCheckOperator:
 
 BigQueryCheckOperator
-"""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryCheckOperator
 
 .. _BigQueryValueCheckOperator:
 
 BigQueryValueCheckOperator
-""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryValueCheckOperator
 
 .. _BigQueryIntervalCheckOperator:
 
 BigQueryIntervalCheckOperator
-"""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.bigquery_check_operator.BigQueryIntervalCheckOperator
 
 .. _BigQueryOperator:
 
 BigQueryOperator
-""""""""""""""""
+^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator
 
 .. _BigQueryToBigQueryOperator:
 
 BigQueryToBigQueryOperator
-""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator
 
 .. _BigQueryToCloudStorageOperator:
 
 BigQueryToCloudStorageOperator
-""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.bigquery_to_gcs.BigQueryToCloudStorageOperator
 
 
 BigQueryHook
-^^^^^^^^^^^^
+""""""""""""
 
 .. autoclass:: airflow.contrib.hooks.bigquery_hook.BigQueryHook
     :members:
@@ -88,14 +322,15 @@ Cloud DataFlow
 ''''''''''''''
 
 DataFlow Operators
-^^^^^^^^^^^^^^^^^^
+""""""""""""""""""
 
-- :ref:`DataFlowJavaOperator` :
+- :ref:`DataFlowJavaOperator` : Launches Cloud Dataflow jobs written in Java.
+- :ref:`DataFlowPythonOperator` : Launches Cloud Dataflow jobs written in Python.
 
 .. _DataFlowJavaOperator:
 
 DataFlowJavaOperator
-""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowJavaOperator
 
@@ -133,8 +368,16 @@ DataFlowJavaOperator
         },
         dag=dag)
 
+.. _DataFlowPythonOperator:
+
+DataFlowPythonOperator
+^^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.dataflow_operator.DataFlowPythonOperator
+
+
 DataFlowHook
-^^^^^^^^^^^^
+""""""""""""
 
 .. autoclass:: airflow.contrib.hooks.gcp_dataflow_hook.DataFlowHook
     :members:
@@ -145,7 +388,7 @@ Cloud DataProc
 ''''''''''''''
 
 DataProc Operators
-^^^^^^^^^^^^^^^^^^
+""""""""""""""""""
 
 - :ref:`DataProcPigOperator` : Start a Pig query Job on a Cloud DataProc cluster.
 - :ref:`DataProcHiveOperator` : Start a Hive query Job on a Cloud DataProc cluster.
@@ -157,35 +400,35 @@ DataProc Operators
 .. _DataProcPigOperator:
 
 DataProcPigOperator
-"""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcPigOperator
 
 .. _DataProcHiveOperator:
 
 DataProcHiveOperator
-""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHiveOperator
 
 .. _DataProcSparkSqlOperator:
 
 DataProcSparkSqlOperator
-""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkSqlOperator
 
 .. _DataProcSparkOperator:
 
 DataProcSparkOperator
-"""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcSparkOperator
 
 .. _DataProcHadoopOperator:
 
 DataProcHadoopOperator
-""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.dataproc_operator.DataProcHadoopOperator
 
@@ -201,22 +444,73 @@ DataProcPySparkOperator
 Cloud Datastore
 '''''''''''''''
 
-Datastore Operators
-^^^^^^^^^^^^^^^^^^^
-
 DatastoreHook
-~~~~~~~~~~~~~
+"""""""""""""
 
 .. autoclass:: airflow.contrib.hooks.datastore_hook.DatastoreHook
     :members:
 
 
+Cloud ML Engine
+'''''''''''''''
+
+Cloud ML Engine Operators
+"""""""""""""""""""""""""
+
+- :ref:`MLEngineBatchPredictionOperator` : Start a Cloud ML Engine batch prediction job.
+- :ref:`MLEngineModelOperator` : Manages a Cloud ML Engine model.
+- :ref:`MLEngineTrainingOperator` : Start a Cloud ML Engine training job.
+- :ref:`MLEngineVersionOperator` : Manages a Cloud ML Engine model version.
+
+.. _MLEngineBatchPredictionOperator:
+
+MLEngineBatchPredictionOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineBatchPredictionOperator
+    :members:
+
+.. _MLEngineModelOperator:
+
+MLEngineModelOperator
+^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineModelOperator
+    :members:
+
+.. _MLEngineTrainingOperator:
+
+MLEngineTrainingOperator
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator
+    :members:
+
+.. _MLEngineVersionOperator:
+
+MLEngineVersionOperator
+^^^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineVersionOperator
+    :members:
+
+Cloud ML Engine Hook
+""""""""""""""""""""
+
+.. _MLEngineHook:
+
+MLEngineHook
+^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.hooks.gcp_mlengine_hook.MLEngineHook
+    :members:
+
 
 Cloud Storage
 '''''''''''''
 
 Storage Operators
-^^^^^^^^^^^^^^^^^
+"""""""""""""""""
 
 - :ref:`GoogleCloudStorageDownloadOperator` : Downloads a file from Google Cloud Storage.
 - :ref:`GoogleCloudStorageToBigQueryOperator` : Loads files from Google Cloud Storage into BigQuery.
@@ -224,7 +518,7 @@ Storage Operators
 .. _GoogleCloudStorageDownloadOperator:
 
 GoogleCloudStorageDownloadOperator
-""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator
     :members:
@@ -232,15 +526,14 @@ GoogleCloudStorageDownloadOperator
 .. _GoogleCloudStorageToBigQueryOperator:
 
 GoogleCloudStorageToBigQueryOperator
-""""""""""""""""""""""""""""""""""""
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
 .. autoclass:: airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator
     :members:
 
 
 GoogleCloudStorageHook
-^^^^^^^^^^^^^^^^^^^^^^
+""""""""""""""""""""""
 
 .. autoclass:: airflow.contrib.hooks.gcs_hook.GoogleCloudStorageHook
     :members:
-

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/scheduler.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/scheduler.rst.txt b/_sources/scheduler.rst.txt
index 749d58a..8029eb0 100644
--- a/_sources/scheduler.rst.txt
+++ b/_sources/scheduler.rst.txt
@@ -91,6 +91,7 @@ will do, is to instruct the scheduler to only create a DAG Run for the most curr
 interval series.
 
 .. code:: python
+
     """
     Code that goes along with the Airflow tutorial located at:
     https://github.com/airbnb/airflow/blob/master/airflow/example_dags/tutorial.py
@@ -104,7 +105,7 @@ interval series.
         'owner': 'airflow',
         'depends_on_past': False,
         'start_date': datetime(2015, 12, 1),
-        'email': ['airflow@airflow.com'],
+        'email': ['airflow@example.com'],
         'email_on_failure': False,
         'email_on_retry': False,
         'retries': 1,
@@ -146,8 +147,19 @@ To Keep in Mind
 
 Here are some of the ways you can **unblock tasks**:
 
-* From the UI, you can **clear** (as in delete the status of) individual task instances from the task instances dialog, while defining whether you want to includes the past/future and the upstream/downstream dependencies. Note that a confirmation window comes next and allows you to see the set you are about to clear.
-* The CLI command ``airflow clear -h`` has lots of options when it comes to clearing task instance states, including specifying date ranges, targeting task_ids by specifying a regular expression, flags for including upstream and downstream relatives, and targeting task instances in specific states (``failed``, or ``success``)
-* Marking task instances as successful can be done through the UI. This is mostly to fix false negatives, or for instance when the fix has been applied outside of Airflow.
-* The ``airflow backfill`` CLI subcommand has a flag to ``--mark_success`` and allows selecting subsections of the DAG as well as specifying date ranges.
+* From the UI, you can **clear** (as in delete the status of) individual task instances
+  from the task instances dialog, while defining whether you want to include the past/future
+  and the upstream/downstream dependencies. Note that a confirmation window comes next and
+  allows you to see the set you are about to clear. You can also clear all task instances
+  associated with the dag.
+* The CLI command ``airflow clear -h`` has lots of options when it comes to clearing task instance
+  states, including specifying date ranges, targeting task_ids by specifying a regular expression,
+  flags for including upstream and downstream relatives, and targeting task instances in specific
+  states (``failed`` or ``success``).
+* Clearing a task instance will no longer delete the task instance record. Instead it updates
+  max_tries and sets the current task instance state to None.
+* Marking task instances as successful can be done through the UI. This is mostly to fix false negatives,
+  or for instance when the fix has been applied outside of Airflow.
+* The ``airflow backfill`` CLI subcommand has a flag to ``--mark_success`` and allows selecting
+  subsections of the DAG as well as specifying date ranges.
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/security.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/security.rst.txt b/_sources/security.rst.txt
index 70db606..f33ff04 100644
--- a/_sources/security.rst.txt
+++ b/_sources/security.rst.txt
@@ -6,7 +6,9 @@ to the web application is to do it at the network level, or by using
 SSH tunnels.
 
 It is however possible to switch on authentication by either using one of the supplied
-backends or create your own.
+backends or creating your own.
+
+Be sure to check out :doc:`api` for securing the API.
 
 Web Authentication
 ------------------
@@ -72,6 +74,11 @@ Valid search_scope options can be found in the `ldap3 Documentation <http://ldap
     user_filter = objectClass=*
     # in case of Active Directory you would use: user_name_attr = sAMAccountName
     user_name_attr = uid
+    # group_member_attr should be set accordingly with *_filter
+    # eg :
+    #     group_member_attr = groupMembership
+    #     superuser_filter = groupMembership=CN=airflow-super-users...
+    group_member_attr = memberOf
     superuser_filter = memberOf=CN=airflow-super-users,OU=Groups,OU=RWC,OU=US,OU=NORAM,DC=example,DC=com
     data_profiler_filter = memberOf=CN=airflow-data-profilers,OU=Groups,OU=RWC,OU=US,OU=NORAM,DC=example,DC=com
     bind_user = cn=Manager,dc=example,dc=com
@@ -89,7 +96,7 @@ Roll your own
 
 Airflow uses ``flask_login`` and
 exposes a set of hooks in the ``airflow.default_login`` module. You can
-alter the content and make it part of the ``PYTHONPATH`` and configure it as a backend in ``airflow.cfg```.
+alter the content and make it part of the ``PYTHONPATH`` and configure it as a backend in ``airflow.cfg``.
 
 .. code-block:: bash
 
@@ -100,12 +107,14 @@ alter the content and make it part of the ``PYTHONPATH`` and configure it as a b
 Multi-tenancy
 -------------
 
-You can filter the list of dags in webserver by owner name, when authentication
-is turned on, by setting webserver.filter_by_owner as true in your ``airflow.cfg``
-With this, when a user authenticates and logs into webserver, it will see only the dags
-which it is owner of. A super_user, will be able to see all the dags although.
-This makes the web UI a multi-tenant UI, where a user will only be able to see dags
-created by itself.
+You can filter the list of dags in the webserver by owner name when authentication
+is turned on by setting ``webserver:filter_by_owner`` in your config. With this, a user will see
+only the dags it owns, unless it is a superuser.
+
+.. code-block:: bash
+
+    [webserver]
+    filter_by_owner = True
 
 
 Kerberos
@@ -118,17 +127,18 @@ to authenticate against kerberized services.
 Limitations
 '''''''''''
 
-Please note that at this time not all hooks have been adjusted to make use of this functionality yet.
+Please note that at this time, not all hooks have been adjusted to make use of this functionality.
 Also it does not integrate kerberos into the web interface and you will have to rely on network
 level security for now to make sure your service remains secure.
 
-Celery integration has not been tried and tested yet. However if you generate a key tab for every host
-and launch a ticket renewer next to every worker it will most likely work.
+Celery integration has not been tried and tested yet. However, if you generate a key tab for every
+host and launch a ticket renewer next to every worker it will most likely work.
 
 Enabling kerberos
 '''''''''''''''''
 
-#### Airflow
+Airflow
+^^^^^^^
 
 To enable kerberos you will need to generate a (service) key tab.
 
@@ -160,7 +170,8 @@ Launch the ticket renewer by
     # run ticket renewer
     airflow kerberos
 
-#### Hadoop
+Hadoop
+^^^^^^
 
 If you want to use impersonation, this needs to be enabled in ``core-site.xml`` of your hadoop config.
 
@@ -186,8 +197,8 @@ Of course if you need to tighten your security replace the asterisk with somethi
 Using kerberos authentication
 '''''''''''''''''''''''''''''
 
-The hive hook has been updated to take advantage of kerberos authentication. To allow your DAGs to use it simply
-update the connection details with, for example:
+The hive hook has been updated to take advantage of kerberos authentication. To allow your DAGs to
+use it, simply update the connection details with, for example:
 
 .. code-block:: bash
 
@@ -197,7 +208,7 @@ Adjust the principal to your settings. The _HOST part will be replaced by the fu
 the server.
 
 You can specify if you would like to use the dag owner as the user for the connection or the user specified in the login
-section of the connection. For the login user specify the following as extra:
+section of the connection. For the login user, specify the following as extra:
 
 .. code-block:: bash
 
@@ -209,7 +220,7 @@ For the DAG owner use:
 
     { "use_beeline": true, "principal": "hive/_HOST@EXAMPLE.COM", "proxy_user": "owner"}
 
-and in your DAG, when initializing the HiveOperator, specify
+and in your DAG, when initializing the HiveOperator, specify:
 
 .. code-block:: bash
 
@@ -226,9 +237,6 @@ against an installation of GitHub Enterprise using OAuth2. You can optionally
 specify a team whitelist (composed of slug cased team names) to restrict login
 to only members of those teams.
 
-*NOTE* If you do not specify a team whitelist, anyone with a valid account on
-your GHE installation will be able to login to Airflow.
-
 .. code-block:: bash
 
     [webserver]
@@ -242,6 +250,9 @@ your GHE installation will be able to login to Airflow.
     oauth_callback_route = /example/ghe_oauth/callback
     allowed_teams = 1, 345, 23
 
+.. note:: If you do not specify a team whitelist, anyone with a valid account on
+   your GHE installation will be able to login to Airflow.
+
 Setting up GHE Authentication
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -283,6 +294,7 @@ backend. In order to setup an application:
 
 1. Navigate to https://console.developers.google.com/apis/
 2. Select 'Credentials' from the left hand nav
 3. Click 'Create credentials' and choose 'OAuth client ID'
 4. Choose 'Web application'
 5. Fill in the required information (the 'Authorized redirect URIs' must be fully qualified e.g. http://airflow.example.com/oauth2callback)
@@ -311,24 +323,44 @@ standard port 443, you'll need to configure that too. Be aware that super user p
     web_server_port = 443
     base_url = http://<hostname or IP>:443
 
+Enable CeleryExecutor with SSL. Ensure you properly generate client and server
+certs and keys.
+
+.. code-block:: bash
+
+    [celery]
+    CELERY_SSL_ACTIVE = True
+    CELERY_SSL_KEY = <path to key>
+    CELERY_SSL_CERT = <path to cert>
+    CELERY_SSL_CACERT = <path to cacert>
+
 Impersonation
-'''''''''''''
+-------------
 
 Airflow has the ability to impersonate a unix user while running task
 instances based on the task's ``run_as_user`` parameter, which takes a user's name.
 
-*NOTE* For impersonations to work, Airflow must be run with `sudo` as subtasks are run
+**NOTE:** For impersonation to work, Airflow must be run with `sudo` as subtasks are run
 with `sudo -u` and permissions of files are changed. Furthermore, the unix user needs to
 exist on the worker. Here is what a simple sudoers file entry could look like to achieve
 this, assuming Airflow is running as the `airflow` user. Note that this means that
 the airflow user must be trusted and treated the same way as the root user.
 
 .. code-block:: none
+
     airflow ALL=(ALL) NOPASSWD: ALL
 
+
 Subtasks with impersonation will still log to the same folder, except that the files they
 log to will have permissions changed such that only the unix user can write to them.
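+
+For illustration, a minimal sketch of a task that requests impersonation (assuming a ``dag``
+object is already defined and that a ``batch_user`` unix account exists on the worker):
+
+.. code-block:: python
+
+    from airflow.operators.bash_operator import BashOperator
+
+    # Runs `whoami` as the unix user "batch_user" instead of the
+    # unix user that the airflow worker itself runs as.
+    print_user = BashOperator(
+        task_id='print_user',
+        bash_command='whoami',
+        run_as_user='batch_user',
+        dag=dag)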
 
-*Default impersonation* To prevent tasks that don't use impersonation to be run with
-`sudo` privileges, you can set the `default_impersonation` config in `core` which sets a
-default user impersonate if `run_as_user` is not set.
+Default Impersonation
+'''''''''''''''''''''
+To prevent tasks that don't use impersonation from being run with `sudo` privileges, you can set the
+``core:default_impersonation`` config which sets a default user to impersonate if `run_as_user` is
+not set.
+
+.. code-block:: bash
+
+    [core]
+    default_impersonation = airflow

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/start.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/start.rst.txt b/_sources/start.rst.txt
index cc41d4b..36128f5 100644
--- a/_sources/start.rst.txt
+++ b/_sources/start.rst.txt
@@ -11,7 +11,7 @@ The installation is quick and straightforward.
     export AIRFLOW_HOME=~/airflow
 
     # install from pypi using pip
-    pip install airflow
+    pip install apache-airflow
 
     # initialize the database
     airflow initdb

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_sources/tutorial.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/tutorial.rst.txt b/_sources/tutorial.rst.txt
index 97bbe11..8d2203c 100644
--- a/_sources/tutorial.rst.txt
+++ b/_sources/tutorial.rst.txt
@@ -26,7 +26,7 @@ complicated, a line by line explanation follows below.
         'owner': 'airflow',
         'depends_on_past': False,
         'start_date': datetime(2015, 6, 1),
-        'email': ['airflow@airflow.com'],
+        'email': ['airflow@example.com'],
         'email_on_failure': False,
         'email_on_retry': False,
         'retries': 1,
@@ -117,7 +117,7 @@ of default parameters that we can use when creating tasks.
         'owner': 'airflow',
         'depends_on_past': False,
         'start_date': datetime(2015, 6, 1),
-        'email': ['airflow@airflow.com'],
+        'email': ['airflow@example.com'],
         'email_on_failure': False,
         'email_on_retry': False,
         'retries': 1,
@@ -195,7 +195,8 @@ templates.
 This tutorial barely scratches the surface of what you can do with
 templating in Airflow, but the goal of this section is to let you know
 this feature exists, get you familiar with double curly brackets, and
-point to the most common template variable: ``{{ ds }}``.
+point to the most common template variable: ``{{ ds }}`` (today's "date
+stamp").
 
 .. code:: python
 
@@ -231,6 +232,16 @@ different languages, and general flexibility in structuring pipelines. It is
 also possible to define your ``template_searchpath`` as pointing to any folder
 locations in the DAG constructor call.
 
+Using that same DAG constructor call, it is possible to define
+``user_defined_macros`` which allow you to specify your own variables.
+For example, passing ``dict(foo='bar')`` to this argument allows you
+to use ``{{ foo }}`` in your templates. Moreover, specifying
+``user_defined_filters`` allows you to register your own filters. For example,
+passing ``dict(hello=lambda name: 'Hello %s' % name)`` to this argument allows
+you to use ``{{ 'world' | hello }}`` in your templates. For more information
+regarding custom filters, have a look at the
+`Jinja Documentation <http://jinja.pocoo.org/docs/dev/api/#writing-filters>`_.
+
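+For illustration, a minimal sketch of such a constructor call (reusing the ``default_args``
+dict defined earlier in this tutorial; the DAG id and schedule below are arbitrary):
+
+.. code:: python
+
+    from datetime import timedelta
+
+    from airflow import DAG
+
+    dag = DAG(
+        'templating_example',
+        default_args=default_args,
+        schedule_interval=timedelta(days=1),
+        # makes {{ foo }} render as "bar" in templated fields
+        user_defined_macros=dict(foo='bar'),
+        # makes {{ 'world' | hello }} render as "Hello world"
+        user_defined_filters=dict(hello=lambda name: 'Hello %s' % name))
+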
 For more information on the variables and macros that can be referenced
 in templates, make sure to read through the :ref:`macros` section
 
@@ -278,7 +289,7 @@ something like this:
         'owner': 'airflow',
         'depends_on_past': False,
         'start_date': datetime(2015, 6, 1),
-        'email': ['airflow@airflow.com'],
+        'email': ['airflow@example.com'],
         'email_on_failure': False,
         'email_on_retry': False,
         'retries': 1,

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_static/basic.css
----------------------------------------------------------------------
diff --git a/_static/basic.css b/_static/basic.css
index 7ed0e58..3c7223b 100644
--- a/_static/basic.css
+++ b/_static/basic.css
@@ -4,7 +4,7 @@
  *
  * Sphinx stylesheet -- basic theme.
  *
- * :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
+ * :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
  * :license: BSD, see LICENSE for details.
  *
  */
@@ -398,6 +398,13 @@ table.field-list td, table.field-list th {
     margin: 0;
 }
 
+.field-name {
+    -moz-hyphens: manual;
+    -ms-hyphens: manual;
+    -webkit-hyphens: manual;
+    hyphens: manual;
+}
+
 /* -- other body styles ----------------------------------------------------- */
 
 ol.arabic {
@@ -438,10 +445,14 @@ dd {
     margin-left: 30px;
 }
 
-dt:target, .highlighted {
+dt:target, span.highlighted {
     background-color: #fbe54e;
 }
 
+rect.highlighted {
+    fill: #fbe54e;
+}
+
 dl.glossary dt {
     font-weight: bold;
     font-size: 1.1em;

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_static/doctools.js
----------------------------------------------------------------------
diff --git a/_static/doctools.js b/_static/doctools.js
index 8163495..24992e6 100644
--- a/_static/doctools.js
+++ b/_static/doctools.js
@@ -4,7 +4,7 @@
  *
  * Sphinx JavaScript utilities for all documentation.
  *
- * :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
+ * :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
  * :license: BSD, see LICENSE for details.
  *
  */
@@ -45,7 +45,7 @@ jQuery.urlencode = encodeURIComponent;
  * it will always return arrays of strings for the value parts.
  */
 jQuery.getQueryParameters = function(s) {
-  if (typeof s == 'undefined')
+  if (typeof s === 'undefined')
     s = document.location.search;
   var parts = s.substr(s.indexOf('?') + 1).split('&');
   var result = {};
@@ -66,29 +66,53 @@ jQuery.getQueryParameters = function(s) {
  * span elements with the given class name.
  */
 jQuery.fn.highlightText = function(text, className) {
-  function highlight(node) {
-    if (node.nodeType == 3) {
+  function highlight(node, addItems) {
+    if (node.nodeType === 3) {
       var val = node.nodeValue;
       var pos = val.toLowerCase().indexOf(text);
       if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) {
-        var span = document.createElement("span");
-        span.className = className;
+        var span;
+        var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
+        if (isInSVG) {
+          span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
+        } else {
+          span = document.createElement("span");
+          span.className = className;
+        }
         span.appendChild(document.createTextNode(val.substr(pos, text.length)));
         node.parentNode.insertBefore(span, node.parentNode.insertBefore(
           document.createTextNode(val.substr(pos + text.length)),
           node.nextSibling));
         node.nodeValue = val.substr(0, pos);
+        if (isInSVG) {
+          var bbox = span.getBBox();
+          var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
+       	  rect.x.baseVal.value = bbox.x;
+          rect.y.baseVal.value = bbox.y;
+          rect.width.baseVal.value = bbox.width;
+          rect.height.baseVal.value = bbox.height;
+          rect.setAttribute('class', className);
+          var parentOfText = node.parentNode.parentNode;
+          addItems.push({
+              "parent": node.parentNode,
+              "target": rect});
+        }
       }
     }
     else if (!jQuery(node).is("button, select, textarea")) {
       jQuery.each(node.childNodes, function() {
-        highlight(this);
+        highlight(this, addItems);
       });
     }
   }
-  return this.each(function() {
-    highlight(this);
+  var addItems = [];
+  var result = this.each(function() {
+    highlight(this, addItems);
   });
+  for (var i = 0; i < addItems.length; ++i) {
+    jQuery(addItems[i].parent).before(addItems[i].target);
+  }
+  return result;
 };
 
 /*
@@ -131,21 +155,21 @@ var Documentation = {
    * i18n support
    */
   TRANSLATIONS : {},
-  PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; },
+  PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; },
   LOCALE : 'unknown',
 
   // gettext and ngettext don't access this so that the functions
   // can safely bound to a different name (_ = Documentation.gettext)
   gettext : function(string) {
     var translated = Documentation.TRANSLATIONS[string];
-    if (typeof translated == 'undefined')
+    if (typeof translated === 'undefined')
       return string;
-    return (typeof translated == 'string') ? translated : translated[0];
+    return (typeof translated === 'string') ? translated : translated[0];
   },
 
   ngettext : function(singular, plural, n) {
     var translated = Documentation.TRANSLATIONS[singular];
-    if (typeof translated == 'undefined')
+    if (typeof translated === 'undefined')
       return (n == 1) ? singular : plural;
     return translated[Documentation.PLURALEXPR(n)];
   },
@@ -216,7 +240,7 @@ var Documentation = {
       var src = $(this).attr('src');
       var idnum = $(this).attr('id').substr(7);
       $('tr.cg-' + idnum).toggle();
-      if (src.substr(-9) == 'minus.png')
+      if (src.substr(-9) === 'minus.png')
         $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
       else
         $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
@@ -248,7 +272,7 @@ var Documentation = {
     var path = document.location.pathname;
     var parts = path.split(/\//);
     $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
-      if (this == '..')
+      if (this === '..')
         parts.pop();
     });
     var url = parts.join('/');

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_static/pygments.css
----------------------------------------------------------------------
diff --git a/_static/pygments.css b/_static/pygments.css
index 8213e90..20c4814 100644
--- a/_static/pygments.css
+++ b/_static/pygments.css
@@ -47,8 +47,10 @@
 .highlight .mh { color: #208050 } /* Literal.Number.Hex */
 .highlight .mi { color: #208050 } /* Literal.Number.Integer */
 .highlight .mo { color: #208050 } /* Literal.Number.Oct */
+.highlight .sa { color: #4070a0 } /* Literal.String.Affix */
 .highlight .sb { color: #4070a0 } /* Literal.String.Backtick */
 .highlight .sc { color: #4070a0 } /* Literal.String.Char */
+.highlight .dl { color: #4070a0 } /* Literal.String.Delimiter */
 .highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */
 .highlight .s2 { color: #4070a0 } /* Literal.String.Double */
 .highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */
@@ -59,7 +61,9 @@
 .highlight .s1 { color: #4070a0 } /* Literal.String.Single */
 .highlight .ss { color: #517918 } /* Literal.String.Symbol */
 .highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */
+.highlight .fm { color: #06287e } /* Name.Function.Magic */
 .highlight .vc { color: #bb60d5 } /* Name.Variable.Class */
 .highlight .vg { color: #bb60d5 } /* Name.Variable.Global */
 .highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */
+.highlight .vm { color: #bb60d5 } /* Name.Variable.Magic */
 .highlight .il { color: #208050 } /* Literal.Number.Integer.Long */
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_static/searchtools.js
----------------------------------------------------------------------
diff --git a/_static/searchtools.js b/_static/searchtools.js
index bbfb3ac..33fedf4 100644
--- a/_static/searchtools.js
+++ b/_static/searchtools.js
@@ -4,7 +4,7 @@
  *
  * Sphinx JavaScript utilities for the full-text search.
  *
- * :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
+ * :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
  * :license: BSD, see LICENSE for details.
  *
  */
@@ -540,6 +540,9 @@ var Search = {
           });
         } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
           var suffix = DOCUMENTATION_OPTIONS.SOURCELINK_SUFFIX;
+          if (suffix === undefined) {
+            suffix = '.txt';
+          }
           $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + '_sources/' + item[5] + (item[5].slice(-suffix.length) === suffix ? '' : suffix),
                   dataType: "text",
                   complete: function(jqxhr, textstatus) {

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/_static/websupport.js
----------------------------------------------------------------------
diff --git a/_static/websupport.js b/_static/websupport.js
index 98e7f40..53f6a45 100644
--- a/_static/websupport.js
+++ b/_static/websupport.js
@@ -4,7 +4,7 @@
  *
  * sphinx.websupport utilities for all documentation.
  *
- * :copyright: Copyright 2007-2016 by the Sphinx team, see AUTHORS.
+ * :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS.
  * :license: BSD, see LICENSE for details.
  *
  */

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/28a3eb60/api.html
----------------------------------------------------------------------
diff --git a/api.html b/api.html
index 7aca3bb..e9244a0 100644
--- a/api.html
+++ b/api.html
@@ -13,6 +13,8 @@
 
   
   
+  
+  
 
   
 
@@ -81,7 +83,10 @@
           
             
             
-                <ul class="current">
+              
+            
+            
+              <ul class="current">
 <li class="toctree-l1"><a class="reference internal" href="project.html">Project</a></li>
 <li class="toctree-l1"><a class="reference internal" href="license.html">License</a></li>
 <li class="toctree-l1"><a class="reference internal" href="start.html">Quick Start</a></li>
@@ -191,16 +196,23 @@ configure as follows:</p>
 </div>
 <div class="section" id="authentication">
 <h2>Authentication<a class="headerlink" href="#authentication" title="Permalink to this headline">¶</a></h2>
-<p>Only Kerberos authentication is currently supported for the API. To enable this set the following
-in the configuration:</p>
-<div class="highlight-bash"><div class="highlight"><pre><span></span><span class="o">[</span>api<span class="o">]</span>
-<span class="nv">auth_backend</span> <span class="o">=</span> airflow.api.auth.backend.default
+<p>Authentication for the API is handled separately to the Web Authentication. The default is to not
+require any authentication on the API – i.e. wide open by default. This is not recommended if your
+Airflow webserver is publicly accessible, and you should probably use the deny all backend:</p>
+<div class="highlight-ini"><div class="highlight"><pre><span></span><span class="k">[api]</span>
+<span class="na">auth_backend</span> <span class="o">=</span> <span class="s">airflow.api.auth.backend.deny_all</span>
+</pre></div>
+</div>
+<p>Kerberos is the only “real” authentication mechanism currently supported for the API. To enable
+this set the following in the configuration:</p>
+<div class="highlight-ini"><div class="highlight"><pre><span></span><span class="k">[api]</span>
+<span class="na">auth_backend</span> <span class="o">=</span> <span class="s">airflow.api.auth.backend.kerberos_auth</span>
 
-<span class="o">[</span>kerberos<span class="o">]</span>
-<span class="nv">keytab</span> <span class="o">=</span> &lt;KEYTAB&gt;
+<span class="k">[kerberos]</span>
+<span class="na">keytab</span> <span class="o">=</span> <span class="s">&lt;KEYTAB&gt;</span>
 </pre></div>
 </div>
-<p>The Kerberos service is configured as <cite>airflow/fully.qualified.domainname&#64;REALM</cite>. Make sure this
+<p>The Kerberos service is configured as <code class="docutils literal"><span class="pre">airflow/fully.qualified.domainname&#64;REALM</span></code>. Make sure this
 principal exists in the keytab file.</p>
 </div>
 </div>