You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@airflow.apache.org by ka...@apache.org on 2018/08/29 20:24:47 UTC

[19/48] incubator-airflow-site git commit: 1.10.0 with Updated Api Reference

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_modules/index.html
----------------------------------------------------------------------
diff --git a/_modules/index.html b/_modules/index.html
index 98ad33c..aec8b56 100644
--- a/_modules/index.html
+++ b/_modules/index.html
@@ -24,25 +24,17 @@
 
   
 
-  
-  
-    <link rel="stylesheet" href="../_static/css/theme.css" type="text/css" />
-  
-
-  
-
-  
-        <link rel="index" title="Index"
-              href="../genindex.html"/>
-        <link rel="search" title="Search" href="../search.html"/>
-    <link rel="top" title="Airflow Documentation" href="../index.html"/> 
+  <link rel="stylesheet" href="../_static/css/theme.css" type="text/css" />
+  <link rel="stylesheet" href="../_static/pygments.css" type="text/css" />
+    <link rel="index" title="Index" href="../genindex.html" />
+    <link rel="search" title="Search" href="../search.html" /> 
 
   
   <script src="../_static/js/modernizr.min.js"></script>
 
 </head>
 
-<body class="wy-body-for-nav" role="document">
+<body class="wy-body-for-nav">
 
    
   <div class="wy-grid-for-nav">
@@ -115,7 +107,7 @@
     <section data-toggle="wy-nav-shift" class="wy-nav-content-wrap">
 
       
-      <nav class="wy-nav-top" role="navigation" aria-label="top navigation">
+      <nav class="wy-nav-top" aria-label="top navigation">
         
           <i data-toggle="wy-nav-top" class="fa fa-bars"></i>
           <a href="../index.html">Airflow</a>
@@ -123,9 +115,10 @@
       </nav>
 
 
-      
       <div class="wy-nav-content">
+        
         <div class="rst-content">
+        
           
 
 
@@ -153,8 +146,6 @@
     
       <li class="wy-breadcrumbs-aside">
         
-            
-        
       </li>
     
   </ul>
@@ -170,7 +161,11 @@
 <li><a href="airflow/contrib/hooks/aws_dynamodb_hook.html">airflow.contrib.hooks.aws_dynamodb_hook</a></li>
 <li><a href="airflow/contrib/hooks/aws_hook.html">airflow.contrib.hooks.aws_hook</a></li>
 <li><a href="airflow/contrib/hooks/aws_lambda_hook.html">airflow.contrib.hooks.aws_lambda_hook</a></li>
+<li><a href="airflow/contrib/hooks/azure_data_lake_hook.html">airflow.contrib.hooks.azure_data_lake_hook</a></li>
+<li><a href="airflow/contrib/hooks/azure_fileshare_hook.html">airflow.contrib.hooks.azure_fileshare_hook</a></li>
 <li><a href="airflow/contrib/hooks/bigquery_hook.html">airflow.contrib.hooks.bigquery_hook</a></li>
+<li><a href="airflow/contrib/hooks/cassandra_hook.html">airflow.contrib.hooks.cassandra_hook</a></li>
+<li><a href="airflow/contrib/hooks/cloudant_hook.html">airflow.contrib.hooks.cloudant_hook</a></li>
 <li><a href="airflow/contrib/hooks/databricks_hook.html">airflow.contrib.hooks.databricks_hook</a></li>
 <li><a href="airflow/contrib/hooks/datadog_hook.html">airflow.contrib.hooks.datadog_hook</a></li>
 <li><a href="airflow/contrib/hooks/datastore_hook.html">airflow.contrib.hooks.datastore_hook</a></li>
@@ -179,6 +174,7 @@
 <li><a href="airflow/contrib/hooks/fs_hook.html">airflow.contrib.hooks.fs_hook</a></li>
 <li><a href="airflow/contrib/hooks/ftp_hook.html">airflow.contrib.hooks.ftp_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_api_base_hook.html">airflow.contrib.hooks.gcp_api_base_hook</a></li>
+<li><a href="airflow/contrib/hooks/gcp_container_hook.html">airflow.contrib.hooks.gcp_container_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_dataflow_hook.html">airflow.contrib.hooks.gcp_dataflow_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_dataproc_hook.html">airflow.contrib.hooks.gcp_dataproc_hook</a></li>
 <li><a href="airflow/contrib/hooks/gcp_mlengine_hook.html">airflow.contrib.hooks.gcp_mlengine_hook</a></li>
@@ -186,12 +182,15 @@
 <li><a href="airflow/contrib/hooks/gcs_hook.html">airflow.contrib.hooks.gcs_hook</a></li>
 <li><a href="airflow/contrib/hooks/jenkins_hook.html">airflow.contrib.hooks.jenkins_hook</a></li>
 <li><a href="airflow/contrib/hooks/jira_hook.html">airflow.contrib.hooks.jira_hook</a></li>
+<li><a href="airflow/contrib/hooks/mongo_hook.html">airflow.contrib.hooks.mongo_hook</a></li>
 <li><a href="airflow/contrib/hooks/pinot_hook.html">airflow.contrib.hooks.pinot_hook</a></li>
 <li><a href="airflow/contrib/hooks/qubole_hook.html">airflow.contrib.hooks.qubole_hook</a></li>
 <li><a href="airflow/contrib/hooks/redis_hook.html">airflow.contrib.hooks.redis_hook</a></li>
 <li><a href="airflow/contrib/hooks/redshift_hook.html">airflow.contrib.hooks.redshift_hook</a></li>
+<li><a href="airflow/contrib/hooks/segment_hook.html">airflow.contrib.hooks.segment_hook</a></li>
 <li><a href="airflow/contrib/hooks/sftp_hook.html">airflow.contrib.hooks.sftp_hook</a></li>
 <li><a href="airflow/contrib/hooks/slack_webhook_hook.html">airflow.contrib.hooks.slack_webhook_hook</a></li>
+<li><a href="airflow/contrib/hooks/snowflake_hook.html">airflow.contrib.hooks.snowflake_hook</a></li>
 <li><a href="airflow/contrib/hooks/spark_jdbc_hook.html">airflow.contrib.hooks.spark_jdbc_hook</a></li>
 <li><a href="airflow/contrib/hooks/spark_sql_hook.html">airflow.contrib.hooks.spark_sql_hook</a></li>
 <li><a href="airflow/contrib/hooks/spark_submit_hook.html">airflow.contrib.hooks.spark_submit_hook</a></li>
@@ -199,6 +198,7 @@
 <li><a href="airflow/contrib/hooks/ssh_hook.html">airflow.contrib.hooks.ssh_hook</a></li>
 <li><a href="airflow/contrib/hooks/vertica_hook.html">airflow.contrib.hooks.vertica_hook</a></li>
 <li><a href="airflow/contrib/hooks/wasb_hook.html">airflow.contrib.hooks.wasb_hook</a></li>
+<li><a href="airflow/contrib/hooks/winrm_hook.html">airflow.contrib.hooks.winrm_hook</a></li>
 <li><a href="airflow/contrib/kubernetes/secret.html">airflow.contrib.kubernetes.secret</a></li>
 <li><a href="airflow/contrib/operators/awsbatch_operator.html">airflow.contrib.operators.awsbatch_operator</a></li>
 <li><a href="airflow/contrib/operators/bigquery_check_operator.html">airflow.contrib.operators.bigquery_check_operator</a></li>
@@ -207,18 +207,21 @@
 <li><a href="airflow/contrib/operators/bigquery_table_delete_operator.html">airflow.contrib.operators.bigquery_table_delete_operator</a></li>
 <li><a href="airflow/contrib/operators/bigquery_to_bigquery.html">airflow.contrib.operators.bigquery_to_bigquery</a></li>
 <li><a href="airflow/contrib/operators/bigquery_to_gcs.html">airflow.contrib.operators.bigquery_to_gcs</a></li>
+<li><a href="airflow/contrib/operators/cassandra_to_gcs.html">airflow.contrib.operators.cassandra_to_gcs</a></li>
 <li><a href="airflow/contrib/operators/databricks_operator.html">airflow.contrib.operators.databricks_operator</a></li>
 <li><a href="airflow/contrib/operators/dataflow_operator.html">airflow.contrib.operators.dataflow_operator</a></li>
 <li><a href="airflow/contrib/operators/dataproc_operator.html">airflow.contrib.operators.dataproc_operator</a></li>
 <li><a href="airflow/contrib/operators/datastore_export_operator.html">airflow.contrib.operators.datastore_export_operator</a></li>
 <li><a href="airflow/contrib/operators/datastore_import_operator.html">airflow.contrib.operators.datastore_import_operator</a></li>
 <li><a href="airflow/contrib/operators/discord_webhook_operator.html">airflow.contrib.operators.discord_webhook_operator</a></li>
+<li><a href="airflow/contrib/operators/druid_operator.html">airflow.contrib.operators.druid_operator</a></li>
 <li><a href="airflow/contrib/operators/ecs_operator.html">airflow.contrib.operators.ecs_operator</a></li>
 <li><a href="airflow/contrib/operators/emr_add_steps_operator.html">airflow.contrib.operators.emr_add_steps_operator</a></li>
 <li><a href="airflow/contrib/operators/emr_create_job_flow_operator.html">airflow.contrib.operators.emr_create_job_flow_operator</a></li>
 <li><a href="airflow/contrib/operators/emr_terminate_job_flow_operator.html">airflow.contrib.operators.emr_terminate_job_flow_operator</a></li>
 <li><a href="airflow/contrib/operators/file_to_gcs.html">airflow.contrib.operators.file_to_gcs</a></li>
 <li><a href="airflow/contrib/operators/file_to_wasb.html">airflow.contrib.operators.file_to_wasb</a></li>
+<li><a href="airflow/contrib/operators/gcp_container_operator.html">airflow.contrib.operators.gcp_container_operator</a></li>
 <li><a href="airflow/contrib/operators/gcs_download_operator.html">airflow.contrib.operators.gcs_download_operator</a></li>
 <li><a href="airflow/contrib/operators/gcs_list_operator.html">airflow.contrib.operators.gcs_list_operator</a></li>
 <li><a href="airflow/contrib/operators/gcs_operator.html">airflow.contrib.operators.gcs_operator</a></li>
@@ -226,24 +229,31 @@
 <li><a href="airflow/contrib/operators/gcs_to_gcs.html">airflow.contrib.operators.gcs_to_gcs</a></li>
 <li><a href="airflow/contrib/operators/gcs_to_s3.html">airflow.contrib.operators.gcs_to_s3</a></li>
 <li><a href="airflow/contrib/operators/hipchat_operator.html">airflow.contrib.operators.hipchat_operator</a></li>
+<li><a href="airflow/contrib/operators/hive_to_dynamodb.html">airflow.contrib.operators.hive_to_dynamodb</a></li>
 <li><a href="airflow/contrib/operators/jenkins_job_trigger_operator.html">airflow.contrib.operators.jenkins_job_trigger_operator</a></li>
 <li><a href="airflow/contrib/operators/jira_operator.html">airflow.contrib.operators.jira_operator</a></li>
 <li><a href="airflow/contrib/operators/kubernetes_pod_operator.html">airflow.contrib.operators.kubernetes_pod_operator</a></li>
 <li><a href="airflow/contrib/operators/mlengine_operator.html">airflow.contrib.operators.mlengine_operator</a></li>
+<li><a href="airflow/contrib/operators/mongo_to_s3.html">airflow.contrib.operators.mongo_to_s3</a></li>
 <li><a href="airflow/contrib/operators/mysql_to_gcs.html">airflow.contrib.operators.mysql_to_gcs</a></li>
 <li><a href="airflow/contrib/operators/postgres_to_gcs_operator.html">airflow.contrib.operators.postgres_to_gcs_operator</a></li>
 <li><a href="airflow/contrib/operators/pubsub_operator.html">airflow.contrib.operators.pubsub_operator</a></li>
+<li><a href="airflow/contrib/operators/qubole_check_operator.html">airflow.contrib.operators.qubole_check_operator</a></li>
 <li><a href="airflow/contrib/operators/qubole_operator.html">airflow.contrib.operators.qubole_operator</a></li>
 <li><a href="airflow/contrib/operators/s3_list_operator.html">airflow.contrib.operators.s3_list_operator</a></li>
 <li><a href="airflow/contrib/operators/s3_to_gcs_operator.html">airflow.contrib.operators.s3_to_gcs_operator</a></li>
+<li><a href="airflow/contrib/operators/segment_track_event_operator.html">airflow.contrib.operators.segment_track_event_operator</a></li>
 <li><a href="airflow/contrib/operators/sftp_operator.html">airflow.contrib.operators.sftp_operator</a></li>
 <li><a href="airflow/contrib/operators/slack_webhook_operator.html">airflow.contrib.operators.slack_webhook_operator</a></li>
+<li><a href="airflow/contrib/operators/snowflake_operator.html">airflow.contrib.operators.snowflake_operator</a></li>
 <li><a href="airflow/contrib/operators/spark_jdbc_operator.html">airflow.contrib.operators.spark_jdbc_operator</a></li>
 <li><a href="airflow/contrib/operators/spark_sql_operator.html">airflow.contrib.operators.spark_sql_operator</a></li>
 <li><a href="airflow/contrib/operators/spark_submit_operator.html">airflow.contrib.operators.spark_submit_operator</a></li>
 <li><a href="airflow/contrib/operators/sqoop_operator.html">airflow.contrib.operators.sqoop_operator</a></li>
 <li><a href="airflow/contrib/operators/ssh_operator.html">airflow.contrib.operators.ssh_operator</a></li>
 <li><a href="airflow/contrib/operators/vertica_operator.html">airflow.contrib.operators.vertica_operator</a></li>
+<li><a href="airflow/contrib/operators/vertica_to_hive.html">airflow.contrib.operators.vertica_to_hive</a></li>
+<li><a href="airflow/contrib/operators/winrm_operator.html">airflow.contrib.operators.winrm_operator</a></li>
 <li><a href="airflow/contrib/sensors/aws_redshift_cluster_sensor.html">airflow.contrib.sensors.aws_redshift_cluster_sensor</a></li>
 <li><a href="airflow/contrib/sensors/bash_sensor.html">airflow.contrib.sensors.bash_sensor</a></li>
 <li><a href="airflow/contrib/sensors/bigquery_sensor.html">airflow.contrib.sensors.bigquery_sensor</a></li>
@@ -266,30 +276,46 @@
 <li><a href="airflow/executors/sequential_executor.html">airflow.executors.sequential_executor</a></li>
 <li><a href="airflow/hooks/S3_hook.html">airflow.hooks.S3_hook</a></li>
 <li><a href="airflow/hooks/dbapi_hook.html">airflow.hooks.dbapi_hook</a></li>
+<li><a href="airflow/hooks/docker_hook.html">airflow.hooks.docker_hook</a></li>
+<li><a href="airflow/hooks/druid_hook.html">airflow.hooks.druid_hook</a></li>
 <li><a href="airflow/hooks/hdfs_hook.html">airflow.hooks.hdfs_hook</a></li>
+<li><a href="airflow/hooks/hive_hooks.html">airflow.hooks.hive_hooks</a></li>
 <li><a href="airflow/hooks/http_hook.html">airflow.hooks.http_hook</a></li>
+<li><a href="airflow/hooks/jdbc_hook.html">airflow.hooks.jdbc_hook</a></li>
 <li><a href="airflow/hooks/mssql_hook.html">airflow.hooks.mssql_hook</a></li>
 <li><a href="airflow/hooks/mysql_hook.html">airflow.hooks.mysql_hook</a></li>
 <li><a href="airflow/hooks/oracle_hook.html">airflow.hooks.oracle_hook</a></li>
 <li><a href="airflow/hooks/pig_hook.html">airflow.hooks.pig_hook</a></li>
 <li><a href="airflow/hooks/postgres_hook.html">airflow.hooks.postgres_hook</a></li>
 <li><a href="airflow/hooks/presto_hook.html">airflow.hooks.presto_hook</a></li>
+<li><a href="airflow/hooks/samba_hook.html">airflow.hooks.samba_hook</a></li>
 <li><a href="airflow/hooks/slack_hook.html">airflow.hooks.slack_hook</a></li>
 <li><a href="airflow/hooks/sqlite_hook.html">airflow.hooks.sqlite_hook</a></li>
 <li><a href="airflow/hooks/webhdfs_hook.html">airflow.hooks.webhdfs_hook</a></li>
+<li><a href="airflow/hooks/zendesk_hook.html">airflow.hooks.zendesk_hook</a></li>
 <li><a href="airflow/macros.html">airflow.macros</a></li>
 <ul><li><a href="airflow/macros/hive.html">airflow.macros.hive</a></li>
 </ul><li><a href="airflow/models.html">airflow.models</a></li>
 <li><a href="airflow/operators/bash_operator.html">airflow.operators.bash_operator</a></li>
 <li><a href="airflow/operators/check_operator.html">airflow.operators.check_operator</a></li>
 <li><a href="airflow/operators/dagrun_operator.html">airflow.operators.dagrun_operator</a></li>
+<li><a href="airflow/operators/docker_operator.html">airflow.operators.docker_operator</a></li>
+<li><a href="airflow/operators/druid_check_operator.html">airflow.operators.druid_check_operator</a></li>
 <li><a href="airflow/operators/dummy_operator.html">airflow.operators.dummy_operator</a></li>
 <li><a href="airflow/operators/email_operator.html">airflow.operators.email_operator</a></li>
 <li><a href="airflow/operators/generic_transfer.html">airflow.operators.generic_transfer</a></li>
+<li><a href="airflow/operators/hive_operator.html">airflow.operators.hive_operator</a></li>
+<li><a href="airflow/operators/hive_stats_operator.html">airflow.operators.hive_stats_operator</a></li>
+<li><a href="airflow/operators/hive_to_druid.html">airflow.operators.hive_to_druid</a></li>
+<li><a href="airflow/operators/hive_to_mysql.html">airflow.operators.hive_to_mysql</a></li>
+<li><a href="airflow/operators/hive_to_samba_operator.html">airflow.operators.hive_to_samba_operator</a></li>
 <li><a href="airflow/operators/http_operator.html">airflow.operators.http_operator</a></li>
+<li><a href="airflow/operators/jdbc_operator.html">airflow.operators.jdbc_operator</a></li>
 <li><a href="airflow/operators/latest_only_operator.html">airflow.operators.latest_only_operator</a></li>
 <li><a href="airflow/operators/mssql_operator.html">airflow.operators.mssql_operator</a></li>
+<li><a href="airflow/operators/mssql_to_hive.html">airflow.operators.mssql_to_hive</a></li>
 <li><a href="airflow/operators/mysql_operator.html">airflow.operators.mysql_operator</a></li>
+<li><a href="airflow/operators/mysql_to_hive.html">airflow.operators.mysql_to_hive</a></li>
 <li><a href="airflow/operators/oracle_operator.html">airflow.operators.oracle_operator</a></li>
 <li><a href="airflow/operators/pig_operator.html">airflow.operators.pig_operator</a></li>
 <li><a href="airflow/operators/postgres_operator.html">airflow.operators.postgres_operator</a></li>
@@ -298,6 +324,7 @@
 <li><a href="airflow/operators/python_operator.html">airflow.operators.python_operator</a></li>
 <li><a href="airflow/operators/redshift_to_s3_operator.html">airflow.operators.redshift_to_s3_operator</a></li>
 <li><a href="airflow/operators/s3_file_transform_operator.html">airflow.operators.s3_file_transform_operator</a></li>
+<li><a href="airflow/operators/s3_to_hive_operator.html">airflow.operators.s3_to_hive_operator</a></li>
 <li><a href="airflow/operators/s3_to_redshift_operator.html">airflow.operators.s3_to_redshift_operator</a></li>
 <li><a href="airflow/operators/slack_operator.html">airflow.operators.slack_operator</a></li>
 <li><a href="airflow/operators/sqlite_operator.html">airflow.operators.sqlite_operator</a></li>
@@ -318,9 +345,7 @@
 </ul>
 
            </div>
-           <div class="articleComments">
-            
-           </div>
+           
           </div>
           <footer>
   
@@ -332,7 +357,7 @@
 
     </p>
   </div>
-  Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/snide/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>. 
+  Built with <a href="http://sphinx-doc.org/">Sphinx</a> using a <a href="https://github.com/rtfd/sphinx_rtd_theme">theme</a> provided by <a href="https://readthedocs.org">Read the Docs</a>. 
 
 </footer>
 
@@ -351,6 +376,7 @@
         var DOCUMENTATION_OPTIONS = {
             URL_ROOT:'../',
             VERSION:'',
+            LANGUAGE:'None',
             COLLAPSE_INDEX:false,
             FILE_SUFFIX:'.html',
             HAS_SOURCE:  true,
@@ -363,19 +389,13 @@
 
   
 
-  
-  
-    <script type="text/javascript" src="../_static/js/theme.js"></script>
-  
+  <script type="text/javascript" src="../_static/js/theme.js"></script>
 
-  
-  
   <script type="text/javascript">
       jQuery(function () {
-          SphinxRtdTheme.StickyNav.enable();
+          SphinxRtdTheme.Navigation.enable(true);
       });
-  </script>
-   
+  </script> 
 
 </body>
 </html>
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/code.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/code.rst.txt b/_sources/code.rst.txt
index f055fc6..80ec761 100644
--- a/_sources/code.rst.txt
+++ b/_sources/code.rst.txt
@@ -1,6 +1,8 @@
 API Reference
 =============
 
+.. _api-reference-operators:
+
 Operators
 ---------
 Operators allow for generation of certain types of tasks that become nodes in
@@ -117,6 +119,7 @@ Operators
 .. autoclass:: airflow.contrib.operators.bigquery_get_data.BigQueryGetDataOperator
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateEmptyTableOperator
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator
+.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryOperator
 .. autoclass:: airflow.contrib.operators.bigquery_table_delete_operator.BigQueryTableDeleteOperator
 .. autoclass:: airflow.contrib.operators.bigquery_to_bigquery.BigQueryToBigQueryOperator
@@ -150,6 +153,7 @@ Operators
 .. autoclass:: airflow.contrib.operators.file_to_wasb.FileToWasbOperator
 .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterCreateOperator
 .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator
+.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator
 .. autoclass:: airflow.contrib.operators.gcs_download_operator.GoogleCloudStorageDownloadOperator
 .. autoclass:: airflow.contrib.operators.gcs_list_operator.GoogleCloudStorageListOperator
 .. autoclass:: airflow.contrib.operators.gcs_operator.GoogleCloudStorageCreateBucketOperator
@@ -168,6 +172,8 @@ Operators
 .. autoclass:: airflow.contrib.operators.mlengine_operator.MLEngineTrainingOperator
 .. autoclass:: airflow.contrib.operators.mongo_to_s3.MongoToS3Operator
 .. autoclass:: airflow.contrib.operators.mysql_to_gcs.MySqlToGoogleCloudStorageOperator
+.. autoclass:: airflow.contrib.operators.oracle_to_azure_data_lake_transfer.OracleToAzureDataLakeTransfer
+.. autoclass:: airflow.contrib.operators.oracle_to_oracle_transfer.OracleToOracleTransfer
 .. autoclass:: airflow.contrib.operators.postgres_to_gcs_operator.PostgresToGoogleCloudStorageOperator
 .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicCreateOperator
 .. autoclass:: airflow.contrib.operators.pubsub_operator.PubSubTopicDeleteOperator
@@ -198,7 +204,8 @@ Sensors
 .. autoclass:: airflow.contrib.sensors.aws_redshift_cluster_sensor.AwsRedshiftClusterSensor
 .. autoclass:: airflow.contrib.sensors.bash_sensor.BashSensor
 .. autoclass:: airflow.contrib.sensors.bigquery_sensor.BigQueryTableSensor
-.. autoclass:: airflow.contrib.sensors.cassandra_sensor.CassandraRecordSensor
+.. autoclass:: airflow.contrib.sensors.cassandra_record_sensor.CassandraRecordSensor
+.. autoclass:: airflow.contrib.sensors.cassandra_table_sensor.CassandraTableSensor
 .. autoclass:: airflow.contrib.sensors.datadog_sensor.DatadogSensor
 .. autoclass:: airflow.contrib.sensors.emr_base_sensor.EmrBaseSensor
 .. autoclass:: airflow.contrib.sensors.emr_job_flow_sensor.EmrJobFlowSensor

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/concepts.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/concepts.rst.txt b/_sources/concepts.rst.txt
index 9b10224..50c18c9 100644
--- a/_sources/concepts.rst.txt
+++ b/_sources/concepts.rst.txt
@@ -90,6 +90,8 @@ DAGs can be used as context managers to automatically assign new operators to th
 
     op.dag is dag # True
 
+.. _concepts-operators:
+
 Operators
 =========
 
@@ -113,13 +115,12 @@ Airflow provides operators for many common tasks, including:
 - ``BashOperator`` - executes a bash command
 - ``PythonOperator`` - calls an arbitrary Python function
 - ``EmailOperator`` - sends an email
-- ``HTTPOperator`` - sends an HTTP request
+- ``SimpleHttpOperator`` - sends an HTTP request
 - ``MySqlOperator``, ``SqliteOperator``, ``PostgresOperator``, ``MsSqlOperator``, ``OracleOperator``, ``JdbcOperator``, etc. - executes a SQL command
 - ``Sensor`` - waits for a certain time, file, database row, S3 key, etc...
 
-
 In addition to these basic building blocks, there are many more specific
-operators: ``DockerOperator``, ``HiveOperator``, ``S3FileTransferOperator``,
+operators: ``DockerOperator``, ``HiveOperator``, ``S3FileTransformOperator``,
 ``PrestoToMysqlOperator``, ``SlackOperator``... you get the idea!
 
 The ``airflow/contrib/`` directory contains yet more operators built by the
@@ -129,6 +130,8 @@ the platform.
 
 Operators are only loaded by Airflow if they are assigned to a DAG.
 
+See :doc:`howto/operator` for how to use Airflow operators.
+
 DAG Assignment
 --------------
 
@@ -329,6 +332,17 @@ from ``BaseHook``, Airflow will choose one connection randomly, allowing
 for some basic load balancing and fault tolerance when used in conjunction
 with retries.
 
+Airflow also has the ability to reference connections via environment
+variables from the operating system. But it only supports URI format. If you
+need to specify ``extra`` for your connection, please use web UI.
+
+If connections with the same ``conn_id`` are defined in both Airflow metadata
+database and environment variables, only the one in environment variables
+will be referenced by Airflow (for example, given ``conn_id`` ``postgres_master``,
+Airflow will search for ``AIRFLOW_CONN_POSTGRES_MASTER``
+in environment variables first and directly reference it if found,
+before it starts to search in metadata database).
+
 Many hooks have a default ``conn_id``, where operators using that hook do not
 need to supply an explicit connection ID. For example, the default
 ``conn_id`` for the :class:`~airflow.hooks.postgres_hook.PostgresHook` is

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/howto/index.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/index.rst.txt b/_sources/howto/index.rst.txt
index 1342ed8..f9f160e 100644
--- a/_sources/howto/index.rst.txt
+++ b/_sources/howto/index.rst.txt
@@ -12,6 +12,7 @@ configuring an Airflow environment.
 
     set-config
     initialize-database
+    operator
     manage-connections
     secure-connections
     write-logs

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/howto/operator.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/operator.rst.txt b/_sources/howto/operator.rst.txt
new file mode 100644
index 0000000..efd544e
--- /dev/null
+++ b/_sources/howto/operator.rst.txt
@@ -0,0 +1,103 @@
+Using Operators
+===============
+
+An operator represents a single, ideally idempotent, task. Operators
+determine what actually executes when your DAG runs.
+
+See the :ref:`Operators Concepts <concepts-operators>` documentation and the
+:ref:`Operators API Reference <api-reference-operators>` for more
+information.
+
+.. contents:: :local:
+
+BashOperator
+------------
+
+Use the :class:`~airflow.operators.bash_operator.BashOperator` to execute
+commands in a `Bash <https://www.gnu.org/software/bash/>`__ shell.
+
+.. literalinclude:: ../../airflow/example_dags/example_bash_operator.py
+    :language: python
+    :start-after: [START howto_operator_bash]
+    :end-before: [END howto_operator_bash]
+
+Templating
+^^^^^^^^^^
+
+You can use :ref:`Jinja templates <jinja-templating>` to parameterize the
+``bash_command`` argument.
+
+.. literalinclude:: ../../airflow/example_dags/example_bash_operator.py
+    :language: python
+    :start-after: [START howto_operator_bash_template]
+    :end-before: [END howto_operator_bash_template]
+
+Troubleshooting
+^^^^^^^^^^^^^^^
+
+Jinja template not found
+""""""""""""""""""""""""
+
+Add a space after the script name when directly calling a Bash script with
+the ``bash_command`` argument. This is because Airflow tries to apply a Jinja
+template to it, which will fail.
+
+.. code-block:: python
+
+    t2 = BashOperator(
+        task_id='bash_example',
+
+        # This fails with `Jinja template not found` error
+        # bash_command="/home/batcher/test.sh",
+
+        # This works (has a space after)
+        bash_command="/home/batcher/test.sh ",
+        dag=dag)
+
+PythonOperator
+--------------
+
+Use the :class:`~airflow.operators.python_operator.PythonOperator` to execute
+Python callables.
+
+.. literalinclude:: ../../airflow/example_dags/example_python_operator.py
+    :language: python
+    :start-after: [START howto_operator_python]
+    :end-before: [END howto_operator_python]
+
+Passing in arguments
+^^^^^^^^^^^^^^^^^^^^
+
+Use the ``op_args`` and ``op_kwargs`` arguments to pass additional arguments
+to the Python callable.
+
+.. literalinclude:: ../../airflow/example_dags/example_python_operator.py
+    :language: python
+    :start-after: [START howto_operator_python_kwargs]
+    :end-before: [END howto_operator_python_kwargs]
+
+Templating
+^^^^^^^^^^
+
+When you set the ``provide_context`` argument to ``True``, Airflow passes in
+an additional set of keyword arguments: one for each of the :ref:`Jinja
+template variables <macros>` and a ``templates_dict`` argument.
+
+The ``templates_dict`` argument is templated, so each value in the dictionary
+is evaluated as a :ref:`Jinja template <jinja-templating>`.
+
+Google Cloud Platform Operators
+-------------------------------
+
+GoogleCloudStorageToBigQueryOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Use the
+:class:`~airflow.contrib.operators.gcs_to_bq.GoogleCloudStorageToBigQueryOperator`
+to execute a BigQuery load job.
+
+.. literalinclude:: ../../airflow/contrib/example_dags/example_gcs_to_bq_operator.py
+    :language: python
+    :dedent: 4
+    :start-after: [START howto_operator_gcs_to_bq]
+    :end-before: [END howto_operator_gcs_to_bq]

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/howto/write-logs.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/howto/write-logs.rst.txt b/_sources/howto/write-logs.rst.txt
index a29f606..ac30e84 100644
--- a/_sources/howto/write-logs.rst.txt
+++ b/_sources/howto/write-logs.rst.txt
@@ -11,7 +11,7 @@ directory.
 In addition, users can supply a remote location for storing logs and log
 backups in cloud storage.
 
-In the Airflow Web UI, local logs take precedance over remote logs. If local logs
+In the Airflow Web UI, local logs take precedence over remote logs. If local logs
 can not be found or accessed, the remote logs will be displayed. Note that logs
 are only sent to remote storage once a task completes (including failure). In other
 words, remote logs for running tasks are unavailable. Logs are stored in the log
@@ -111,7 +111,7 @@ Follow the steps below to enable Google Cloud Storage logging.
             'filename_template': FILENAME_TEMPLATE,
         },
 
-        # Update the airflow.task and airflow.tas_runner blocks to be 'gcs.task' instead of 'file.task'.
+        # Update the airflow.task and airflow.task_runner blocks to be 'gcs.task' instead of 'file.task'.
         'loggers': {
             'airflow.task': {
                 'handlers': ['gcs.task'],

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/installation.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/installation.rst.txt b/_sources/installation.rst.txt
index 9e9ab3e..6d32c07 100644
--- a/_sources/installation.rst.txt
+++ b/_sources/installation.rst.txt
@@ -14,7 +14,7 @@ You can also install Airflow with support for extra features like ``s3`` or ``po
 
 .. code-block:: bash
 
-    pip install "apache-airflow[s3, postgres]"
+    pip install apache-airflow[postgres,s3]
 
 .. note:: GPL dependency
 
@@ -41,66 +41,67 @@ Here's the list of the subpackages and what they enable:
 +---------------+----------------------------------------------+-------------------------------------------------+
 | subpackage    | install command                              | enables                                         |
 +===============+==============================================+=================================================+
-|  all          | ``pip install apache-airflow[all]``          | All Airflow features known to man               |
+| all           | ``pip install apache-airflow[all]``          | All Airflow features known to man               |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  all_dbs      | ``pip install apache-airflow[all_dbs]``      | All databases integrations                      |
+| all_dbs       | ``pip install apache-airflow[all_dbs]``      | All databases integrations                      |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  async        | ``pip install apache-airflow[async]``        | Async worker classes for gunicorn               |
+| async         | ``pip install apache-airflow[async]``        | Async worker classes for Gunicorn               |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  devel        | ``pip install apache-airflow[devel]``        | Minimum dev tools requirements                  |
+| celery        | ``pip install apache-airflow[celery]``       | CeleryExecutor                                  |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  devel_hadoop | ``pip install apache-airflow[devel_hadoop]`` | Airflow + dependencies on the Hadoop stack      |
+| cloudant      | ``pip install apache-airflow[cloudant]``     | Cloudant hook                                   |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  celery       | ``pip install apache-airflow[celery]``       | CeleryExecutor                                  |
+| crypto        | ``pip install apache-airflow[crypto]``       | Encrypt connection passwords in metadata db     |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  crypto       | ``pip install apache-airflow[crypto]``       | Encrypt connection passwords in metadata db     |
+| devel         | ``pip install apache-airflow[devel]``        | Minimum dev tools requirements                  |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  druid        | ``pip install apache-airflow[druid]``        | Druid.io related operators & hooks              |
+| devel_hadoop  | ``pip install apache-airflow[devel_hadoop]`` | Airflow + dependencies on the Hadoop stack      |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  gcp_api      | ``pip install apache-airflow[gcp_api]``      | Google Cloud Platform hooks and operators       |
+| druid         | ``pip install apache-airflow[druid]``        | Druid related operators & hooks                 |
++---------------+----------------------------------------------+-------------------------------------------------+
+| gcp_api       | ``pip install apache-airflow[gcp_api]``      | Google Cloud Platform hooks and operators       |
 |               |                                              | (using ``google-api-python-client``)            |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  jdbc         | ``pip install apache-airflow[jdbc]``         | JDBC hooks and operators                        |
+| hdfs          | ``pip install apache-airflow[hdfs]``         | HDFS hooks and operators                        |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  hdfs         | ``pip install apache-airflow[hdfs]``         | HDFS hooks and operators                        |
+| hive          | ``pip install apache-airflow[hive]``         | All Hive related operators                      |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  hive         | ``pip install apache-airflow[hive]``         | All Hive related operators                      |
+| jdbc          | ``pip install apache-airflow[jdbc]``         | JDBC hooks and operators                        |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  kerberos     | ``pip install apache-airflow[kerberos]``     | kerberos integration for kerberized hadoop      |
+| kerberos      | ``pip install apache-airflow[kerberos]``     | Kerberos integration for Kerberized Hadoop      |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  ldap         | ``pip install apache-airflow[ldap]``         | ldap authentication for users                   |
+| ldap          | ``pip install apache-airflow[ldap]``         | LDAP authentication for users                   |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  mssql        | ``pip install apache-airflow[mssql]``        | Microsoft SQL operators and hook,               |
+| mssql         | ``pip install apache-airflow[mssql]``        | Microsoft SQL Server operators and hook,        |
 |               |                                              | support as an Airflow backend                   |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  mysql        | ``pip install apache-airflow[mysql]``        | MySQL operators and hook, support as            |
-|               |                                              | an Airflow backend. The version of MySQL server |
-|               |                                              | has to be 5.6.4+. The exact version upper bound |
-|               |                                              | depends on version of ``mysqlclient`` package.  |
-|               |                                              | For example, ``mysqlclient`` 1.3.12 can only be |
+| mysql         | ``pip install apache-airflow[mysql]``        | MySQL operators and hook, support as an Airflow |
+|               |                                              | backend. The version of MySQL server has to be  |
+|               |                                              | 5.6.4+. The exact version upper bound depends   |
+|               |                                              | on version of ``mysqlclient`` package. For      |
+|               |                                              | example, ``mysqlclient`` 1.3.12 can only be     |
 |               |                                              | used with MySQL server 5.6.4 through 5.7.       |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  password     | ``pip install apache-airflow[password]``     | Password Authentication for users               |
+| password      | ``pip install apache-airflow[password]``     | Password authentication for users               |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  postgres     | ``pip install apache-airflow[postgres]``     | Postgres operators and hook, support            |
-|               |                                              | as an Airflow backend                           |
+| postgres      | ``pip install apache-airflow[postgres]``     | PostgreSQL operators and hook, support as an    |
+|               |                                              | Airflow backend                                 |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  qds          | ``pip install apache-airflow[qds]``          | Enable QDS (qubole data services) support       |
+| qds           | ``pip install apache-airflow[qds]``          | Enable QDS (Qubole Data Service) support        |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  rabbitmq     | ``pip install apache-airflow[rabbitmq]``     | Rabbitmq support as a Celery backend            |
+| rabbitmq      | ``pip install apache-airflow[rabbitmq]``     | RabbitMQ support as a Celery backend            |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  s3           | ``pip install apache-airflow[s3]``           | ``S3KeySensor``, ``S3PrefixSensor``             |
+| redis         | ``pip install apache-airflow[redis]``        | Redis hooks and sensors                         |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  samba        | ``pip install apache-airflow[samba]``        | ``Hive2SambaOperator``                          |
+| s3            | ``pip install apache-airflow[s3]``           | ``S3KeySensor``, ``S3PrefixSensor``             |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  slack        | ``pip install apache-airflow[slack]``        | ``SlackAPIPostOperator``                        |
+| samba         | ``pip install apache-airflow[samba]``        | ``Hive2SambaOperator``                          |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  vertica      | ``pip install apache-airflow[vertica]``      | Vertica hook                                    |
-|               |                                              | support as an Airflow backend                   |
+| slack         | ``pip install apache-airflow[slack]``        | ``SlackAPIPostOperator``                        |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  cloudant     | ``pip install apache-airflow[cloudant]``     | Cloudant hook                                   |
+| ssh           | ``pip install apache-airflow[ssh]``          | SSH hooks and Operator                          |
 +---------------+----------------------------------------------+-------------------------------------------------+
-|  redis        | ``pip install apache-airflow[redis]``        | Redis hooks and sensors                         |
+| vertica       | ``pip install apache-airflow[vertica]``      | Vertica hook support as an Airflow backend      |
 +---------------+----------------------------------------------+-------------------------------------------------+
 
 Initiating Airflow Database

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/integration.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/integration.rst.txt b/_sources/integration.rst.txt
index 660b216..4c513bf 100644
--- a/_sources/integration.rst.txt
+++ b/_sources/integration.rst.txt
@@ -118,9 +118,9 @@ WasbHook
 Azure File Share
 ''''''''''''''''
 
-Cloud variant of a SMB file share. Make sure that a Airflow connection of 
-type `wasb` exists. Authorization can be done by supplying a login (=Storage account name) 
-and password (=Storage account key), or login and SAS token in the extra field 
+Cloud variant of an SMB file share. Make sure that an Airflow connection of
+type `wasb` exists. Authorization can be done by supplying a login (=Storage account name)
+and password (=Storage account key), or login and SAS token in the extra field
 (see connection `wasb_default` for an example).
 
 AzureFileShareHook
@@ -349,6 +349,7 @@ BigQuery Operators
 - :ref:`BigQueryIntervalCheckOperator` : Checks that the values of metrics given as SQL expressions are within a certain tolerance of the ones from days_back before.
 - :ref:`BigQueryCreateEmptyTableOperator` : Creates a new, empty table in the specified BigQuery dataset optionally with schema.
 - :ref:`BigQueryCreateExternalTableOperator` : Creates a new, external table in the dataset with the data in Google Cloud Storage.
+- :ref:`BigQueryDeleteDatasetOperator` : Deletes an existing BigQuery dataset.
 - :ref:`BigQueryOperator` : Executes BigQuery SQL queries in a specific BigQuery database.
 - :ref:`BigQueryToBigQueryOperator` : Copy a BigQuery table to another BigQuery table.
 - :ref:`BigQueryToCloudStorageOperator` : Transfers a BigQuery table to a Google Cloud Storage bucket
@@ -396,6 +397,13 @@ BigQueryCreateExternalTableOperator
 
 .. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryCreateExternalTableOperator
 
+.. _BigQueryDeleteDatasetOperator:
+
+BigQueryDeleteDatasetOperator
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.bigquery_operator.BigQueryDeleteDatasetOperator
+
 .. _BigQueryOperator:
 
 BigQueryOperator
@@ -768,6 +776,12 @@ GKEClusterDeleteOperator
 .. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEClusterDeleteOperator
 .. _GKEClusterDeleteOperator:
 
+GKEPodOperator
+^^^^^^^^^^^^^^
+
+.. autoclass:: airflow.contrib.operators.gcp_container_operator.GKEPodOperator
+.. _GKEPodOperator:
+
 Google Kubernetes Engine Hook
 """""""""""""""""""""""""""""
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/plugins.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/plugins.rst.txt b/_sources/plugins.rst.txt
index a21f100..3f1f7ee 100644
--- a/_sources/plugins.rst.txt
+++ b/_sources/plugins.rst.txt
@@ -78,6 +78,26 @@ looks like:
         menu_links = []
 
 
+You can derive it by inheritance (please refer to the example below).
+Please note ``name`` inside this class must be specified.
+
+After the plugin is imported into Airflow,
+you can invoke it using statement like
+
+
+.. code:: python
+
+    from airflow.{type, like "operators", "sensors"}.{name specified inside the plugin class} import *
+
+
+When you write your own plugins, make sure you understand them well.
+There are some essential properties for each type of plugin.
+For example,
+
+* For ``Operator`` plugin, an ``execute`` method is compulsory.
+* For ``Sensor`` plugin, a ``poke`` method returning a Boolean value is compulsory.
+
+
 Example
 -------
 

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/project.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/project.rst.txt b/_sources/project.rst.txt
index d1f2cc0..cd3b60f 100644
--- a/_sources/project.rst.txt
+++ b/_sources/project.rst.txt
@@ -30,6 +30,7 @@ Committers
 - @fokko (Fokko Driesprong)
 - @ash (Ash Berlin-Taylor)
 - @kaxilnaik (Kaxil Naik)
+- @feng-tao (Tao Feng)
 
 For the full list of contributors, take a look at `Airflow's Github
 Contributor page:

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/scheduler.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/scheduler.rst.txt b/_sources/scheduler.rst.txt
index dfa0a42..3e89589 100644
--- a/_sources/scheduler.rst.txt
+++ b/_sources/scheduler.rst.txt
@@ -13,7 +13,7 @@ execute ``airflow scheduler``. It will use the configuration specified in
 ``airflow.cfg``.
 
 Note that if you run a DAG on a ``schedule_interval`` of one day,
-the run stamped ``2016-01-01`` will be trigger soon after ``2016-01-01T23:59``.
+the run stamped ``2016-01-01`` will be triggered soon after ``2016-01-01T23:59``.
 In other words, the job instance is started once the period it covers
 has ended.
 
@@ -134,6 +134,8 @@ specific ``run_id``. The ``DAG Runs`` created externally to the
 scheduler get associated to the trigger's timestamp, and will be displayed
 in the UI alongside scheduled ``DAG runs``.
 
+In addition, you can also manually trigger a ``DAG Run`` using the web UI (tab "DAGs" -> column "Links" -> button "Trigger Dag").
+
 
 To Keep in Mind
 '''''''''''''''
@@ -158,6 +160,7 @@ Here are some of the ways you can **unblock tasks**:
   states (``failed``, or ``success``)
 * Clearing a task instance will no longer delete the task instance record. Instead it updates
   max_tries and set the current task instance state to be None.
+* Marking task instances as failed can be done through the UI. This can be used to stop running task instances.
 * Marking task instances as successful can be done through the UI. This is mostly to fix false negatives,
   or for instance when the fix has been applied outside of Airflow.
 * The ``airflow backfill`` CLI subcommand has a flag to ``--mark_success`` and allows selecting

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_sources/security.rst.txt
----------------------------------------------------------------------
diff --git a/_sources/security.rst.txt b/_sources/security.rst.txt
index 3c328d8..253587a 100644
--- a/_sources/security.rst.txt
+++ b/_sources/security.rst.txt
@@ -280,7 +280,7 @@ Google Authentication
 '''''''''''''''''''''
 
 The Google authentication backend can be used to authenticate users
-against Google using OAuth2. You must specify the domains to restrict
+against Google using OAuth2. You must specify the email domains to restrict
 login, separated with a comma, to only members of those domains.
 
 .. code-block:: bash

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_static/basic.css
----------------------------------------------------------------------
diff --git a/_static/basic.css b/_static/basic.css
index 6f40830..19ced10 100644
--- a/_static/basic.css
+++ b/_static/basic.css
@@ -82,9 +82,21 @@ div.sphinxsidebar input {
 }
 
 div.sphinxsidebar #searchbox input[type="text"] {
-    width: 170px;
+    float: left;
+    width: 80%;
+    padding: 0.25em;
+    box-sizing: border-box;
+}
+
+div.sphinxsidebar #searchbox input[type="submit"] {
+    float: left;
+    width: 20%;
+    border-left: none;
+    padding: 0.25em;
+    box-sizing: border-box;
 }
 
+
 img {
     border: 0;
     max-width: 100%;
@@ -199,6 +211,11 @@ table.modindextable td {
 
 /* -- general body styles --------------------------------------------------- */
 
+div.body {
+    min-width: 450px;
+    max-width: 800px;
+}
+
 div.body p, div.body dd, div.body li, div.body blockquote {
     -moz-hyphens: auto;
     -ms-hyphens: auto;
@@ -332,6 +349,11 @@ table.docutils {
     border-collapse: collapse;
 }
 
+table.align-center {
+    margin-left: auto;
+    margin-right: auto;
+}
+
 table caption span.caption-number {
     font-style: italic;
 }

http://git-wip-us.apache.org/repos/asf/incubator-airflow-site/blob/7d4d7628/_static/css/badge_only.css
----------------------------------------------------------------------
diff --git a/_static/css/badge_only.css b/_static/css/badge_only.css
index 6362912..323730a 100644
--- a/_static/css/badge_only.css
+++ b/_static/css/badge_only.css
@@ -1,2 +1 @@
-.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../font/fontawesome_webfont.eot");src:url("../font/fontawesome_webfont.eot?#iefix") format("embedded-opentype"),url("../font/fontawesome_webfont.woff") format("woff"),url("../font/fontawesome_webfont.ttf") format("truetype"),url("../font/fontawesome_webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:0.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:be
 fore{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;border-top:solid 10px #343131;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-cu
 rrent-version .fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-ver
 sion .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}
-/*# sourceMappingURL=badge_only.css.map */
+.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../fonts/fontawesome-webfont.eot");src:url("../fonts/fontawesome-webfont.eot?#iefix") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff") format("woff"),url("../fonts/fontawesome-webfont.ttf") format("truetype"),url("../fonts/fontawesome-webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-boo
 k:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{flo
 at:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up{height:auto;max-height:100%}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.
 shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}