Posted to commits@druid.apache.org by jo...@apache.org on 2020/01/22 07:33:20 UTC

[druid-website] 01/01: Add 0.17.0 rc1 docs

This is an automated email from the ASF dual-hosted git repository.

jonwei pushed a commit to branch 0.17.0-docs
in repository https://gitbox.apache.org/repos/asf/druid-website.git

commit a9064a8c2e9463b021d42965710eb12b93dcefeb
Author: jon-wei <jo...@imply.io>
AuthorDate: Tue Jan 21 23:32:34 2020 -0800

    Add 0.17.0 rc1 docs
---
 css/base.css                                       |  207 ---
 css/blogs.css                                      |   68 -
 css/bootstrap-pure.css                             | 1855 --------------------
 css/docs.css                                       |  126 --
 css/footer.css                                     |   29 -
 css/header.css                                     |  110 --
 css/index.css                                      |   50 -
 css/news-list.css                                  |   63 -
 css/reset.css                                      |   44 -
 css/syntax.css                                     |  281 ---
 css/variables.css                                  |    0
 docs/0.17.0/design/architecture.html               |    2 +-
 docs/0.17.0/development/extensions-core/avro.html  |   13 +-
 .../0.17.0/development/extensions-core/google.html |   24 +-
 docs/0.17.0/development/extensions-core/hdfs.html  |  117 +-
 .../extensions-core/kafka-ingestion.html           |    2 +-
 .../extensions-core/kinesis-ingestion.html         |    2 +-
 docs/0.17.0/development/extensions-core/mysql.html |    6 +-
 docs/0.17.0/development/extensions-core/orc.html   |   10 +-
 .../development/extensions-core/postgresql.html    |    6 +-
 docs/0.17.0/development/extensions-core/s3.html    |   85 +-
 docs/0.17.0/development/modules.html               |    4 +-
 docs/0.17.0/ingestion/data-formats.html            |   25 +-
 docs/0.17.0/ingestion/hadoop.html                  |   29 +-
 docs/0.17.0/ingestion/index.html                   |    8 +-
 docs/0.17.0/ingestion/native-batch.html            |    7 +
 docs/0.17.0/operations/other-hadoop.html           |    2 +-
 docs/0.17.0/querying/aggregations.html             |   18 +-
 docs/0.17.0/querying/sql.html                      |    4 +-
 docs/0.17.0/tutorials/tutorial-batch.html          |   69 +-
 docs/0.17.0/tutorials/tutorial-compaction.html     |    4 +-
 docs/0.17.0/tutorials/tutorial-ingestion-spec.html |  333 ++--
 docs/0.17.0/tutorials/tutorial-kafka.html          |   61 +-
 docs/0.17.0/tutorials/tutorial-rollup.html         |   35 +-
 docs/0.17.0/tutorials/tutorial-transform-spec.html |   35 +-
 docs/latest/design/architecture.html               |    2 +-
 docs/latest/development/extensions-core/avro.html  |   13 +-
 .../latest/development/extensions-core/google.html |   24 +-
 docs/latest/development/extensions-core/hdfs.html  |  117 +-
 .../extensions-core/kafka-ingestion.html           |    2 +-
 .../extensions-core/kinesis-ingestion.html         |    2 +-
 docs/latest/development/extensions-core/mysql.html |    6 +-
 docs/latest/development/extensions-core/orc.html   |   10 +-
 .../development/extensions-core/postgresql.html    |    6 +-
 docs/latest/development/extensions-core/s3.html    |   85 +-
 docs/latest/development/modules.html               |    4 +-
 docs/latest/ingestion/data-formats.html            |   25 +-
 docs/latest/ingestion/hadoop.html                  |   29 +-
 docs/latest/ingestion/index.html                   |    8 +-
 docs/latest/ingestion/native-batch.html            |    7 +
 docs/latest/operations/other-hadoop.html           |    2 +-
 docs/latest/querying/aggregations.html             |   18 +-
 docs/latest/querying/sql.html                      |    4 +-
 docs/latest/tutorials/tutorial-batch.html          |   69 +-
 docs/latest/tutorials/tutorial-compaction.html     |    4 +-
 docs/latest/tutorials/tutorial-ingestion-spec.html |  333 ++--
 docs/latest/tutorials/tutorial-kafka.html          |   61 +-
 docs/latest/tutorials/tutorial-rollup.html         |   35 +-
 docs/latest/tutorials/tutorial-transform-spec.html |   35 +-
 59 files changed, 990 insertions(+), 3645 deletions(-)

diff --git a/css/base.css b/css/base.css
deleted file mode 100644
index a256214..0000000
--- a/css/base.css
+++ /dev/null
@@ -1,207 +0,0 @@
-html, body, div, span, applet, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, a, abbr, acronym, address, big, cite, code, del, dfn, em, img, ins, kbd, q, s, samp, small, strike, strong, sub, sup, tt, var, b, u, i, center, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td, article, aside, canvas, details, embed, figure, figcaption, footer, header, menu, nav, output, ruby, section, summary, time, mark, audio, video {
-  margin: 0;
-  padding: 0;
-  border: 0;
-  font-size: 100%;
-  font: inherit;
-  vertical-align: baseline; }
-
-article, aside, details, figcaption, figure, footer, header, menu, nav, section {
-  display: block; }
-
-body {
-  line-height: 1; }
-
-ol, ul {
-  list-style: none; }
-
-table {
-  border-collapse: collapse;
-  border-spacing: 0; }
-
-* {
-  box-sizing: border-box;
-  vertical-align: top; }
-
-body * {
-  position: relative; }
-
-a {
-  text-decoration: inherit;
-  color: inherit;
-  cursor: inherit; }
-
-div, span {
-  cursor: inherit; }
-
-text {
-  cursor: default; }
-
-button, input, textarea {
-  border: 0;
-  margin: 0; }
-  button:focus, input:focus, textarea:focus {
-    outline: none; }
-
-body {
-  margin-top: 54px; }
-
-html, body {
-  position: relative;
-  color: #1C1C26;
-  font-family: 'Open Sans', Arial, Helvetica, sans-serif;
-  font-weight: 400;
-  font-size: 15px;
-  word-wrap: break-word; }
-  html h1, html h2, html h3, html h4, html h5, html h6, html .h1, html .h2, html .h3, html .h4, html .h5, html .h6, body h1, body h2, body h3, body h4, body h5, body h6, body .h1, body .h2, body .h3, body .h4, body .h5, body .h6 {
-    font-weight: 600;
-    line-height: 140%;
-    margin-bottom: 14px;
-    margin-top: 28px; }
-  html h1, body h1 {
-    font-size: 2.7em; }
-  html h2, body h2 {
-    font-size: 2.2em; }
-  html h3, body h3 {
-    font-size: 1.7em; }
-  html h4, body h4 {
-    font-size: 1.4em; }
-  html h5, body h5 {
-    font-weight: bold; }
-  html h6, body h6 {
-    font-weight: bold;
-    color: #999999; }
-  html p, body p {
-    line-height: 170%;
-    margin-bottom: 14px;
-    margin-top: 14px; }
-    html p:first-child, body p:first-child {
-      margin-top: 28px; }
-  html strong, body strong {
-    font-weight: 700; }
-  html hr, body hr {
-    margin: 30px 0 30px 0; }
-  html a, body a {
-    cursor: pointer; }
-  html p a, html table a, html ul a, html li a, body p a, body table a, body ul a, body li a {
-    color: #4460de;
-    cursor: pointer;
-    font-weight: 600; }
-    html p a:hover, html table a:hover, html ul a:hover, html li a:hover, body p a:hover, body table a:hover, body ul a:hover, body li a:hover {
-      text-decoration: underline; }
-  html ul, body ul {
-    margin-top: 14px;
-    margin-bottom: 14px;
-    line-height: 170%; }
-    html ul li, body ul li {
-      margin-left: 18px;
-      list-style-type: disc; }
-  html ol, body ol {
-    margin-top: 14px;
-    margin-bottom: 14px;
-    line-height: 170%; }
-    html ol li, body ol li {
-      margin-left: 22px;
-      list-style-type: decimal; }
-  html table, body table {
-    width: 100%;
-    table-layout: fixed;
-    margin-top: 14px;
-    margin-bottom: 14px; }
-    html table th, body table th {
-      font-weight: 700; }
-    html table th, html table td, body table th, body table td {
-      padding: 5px; }
-  html blockquote, body blockquote {
-    font-size: 14px;
-    font-style: italic;
-    color: #777; }
-
-.druid-header {
-  padding: 24px 0 34px;
-  margin-bottom: 14px;
-  color: #1c1c26;
-  text-align: center;
-  background-image: url("/img/watermark-light.png");
-  background-size: 330px 330px;
-  background-repeat: no-repeat;
-  background-position: 18% -30px;
-  background-color: #e7e7ec;
-  overflow: hidden; }
-  .druid-header h1 {
-    margin-top: 14px;
-    font-size: 2.8em; }
-  .druid-header h4 {
-    font-weight: 400;
-    font-size: 15px;
-    margin-top: -5px;
-    margin-bottom: 0; }
-    .druid-header h4 a {
-      color: #4460de;
-      font-weight: 600; }
-      .druid-header h4 a .fa {
-        margin-right: 4px; }
-      .druid-header h4 a:hover {
-        text-decoration: underline; }
-
-.text-indent {
-  padding-left: 50px; }
-
-.text-indent-2 {
-  padding-left: 100px; }
-
-.text-indent-p p {
-  padding-left: 50px; }
-
-.image-large {
-  text-align: center;
-  margin-top: 28px;
-  margin-bottom: 28px; }
-  .image-large img {
-    width: 100%;
-    max-width: 660px; }
-
-.large-button {
-  display: inline-block;
-  padding: 10px 22px;
-  color: white;
-  background: #4460de;
-  border-radius: 2px;
-  font-size: 15px;
-  margin-top: 14px;
-  margin-bottom: 14px; }
-  .large-button .fa {
-    margin-right: 3px; }
-  .large-button:hover {
-    background: #2442cb;
-    text-decoration: none; }
-
-.features {
-  margin-bottom: 28px;
-  margin-top: 38px; }
-  .features .feature {
-    padding-left: 74px;
-    margin-bottom: 34px; }
-    .features .feature:nth-child(3n-2) .fa {
-      background: #cbd5ff; }
-    .features .feature:nth-child(3n-1) .fa {
-      background: #a7f1d9; }
-    .features .feature:nth-child(3n) .fa {
-      background: #ffd8a8; }
-    .features .feature .fa {
-      top: 0;
-      left: 0;
-      position: absolute;
-      width: 54px;
-      height: 54px;
-      line-height: 54px;
-      border-radius: 40px;
-      text-align: center;
-      font-size: 20px;
-      background: #cbd5ff;
-      color: #1C1C26; }
-    .features .feature p {
-      margin-top: 0; }
-    .features .feature h5 {
-      margin-bottom: 0;
-      font-size: 1.2em; }
diff --git a/css/blogs.css b/css/blogs.css
deleted file mode 100644
index 081b39e..0000000
--- a/css/blogs.css
+++ /dev/null
@@ -1,68 +0,0 @@
-.blog-listing {
-  margin-bottom: 70px; }
-
-.blog-listing h2 {
-  color: #333; }
-
-.blog-listing h2 a {
-  color: inherit;
-  color: #575B61; }
-
-.blog-listing h2 a:hover,
-.blog-listing h2 a:focus {
-  color: #b1bac6;
-  text-decoration: none; }
-
-.blog-listing .btn-default {
-  border-color: #b1b1ac;
-  border-color: #2ceefb;
-  border-color: #2C90FB;
-  border-color: #2576CC;
-  color: #575B61;
-  color: #2C90FB;
-  color: #2576CC; }
-
-.blog-listing .btn-default:hover,
-.blog-listing .btn-default:focus {
-  /* background-color: #b1b1ac; */
-  background-color: #2C90FB;
-  background-color: #2576CC;
-  color: #fff; }
-
-.blog-entry {
-  margin-bottom: 70px; }
-
-.druid-header.blog {
-  padding: 0;
-  overflow: hidden; }
-
-.blog .title-spacer {
-  min-height: 240px; }
-
-.blog .title-image-wrap {
-  top: -50%;
-  left: -50%;
-  width: 200%;
-  height: 200%;
-  position: relative;
-  overflow: hidden;
-  min-height: 45px;
-  /* min-height: 200px; */ }
-
-.blog-entry img {
-  max-width: 100%; }
-
-.blog .title-image {
-  position: absolute;
-  min-width: 30%;
-  min-height: 200px;
-  margin: auto;
-  top: 0;
-  right: 0;
-  bottom: 0;
-  left: 0;
-  box-shadow: 0px 2px 9px rgba(0, 0, 0, 0.5); }
-
-.recents ul li {
-  font-weight: 400;
-  margin-bottom: 15px; }
diff --git a/css/bootstrap-pure.css b/css/bootstrap-pure.css
deleted file mode 100644
index 820c985..0000000
--- a/css/bootstrap-pure.css
+++ /dev/null
@@ -1,1855 +0,0 @@
-/*!
- * Bootstrap v3.3.5 (http://getbootstrap.com)
- * Copyright 2011-2015 Twitter, Inc.
- * Licensed under MIT (https://github.com/twbs/bootstrap/blob/master/LICENSE)
- * Modified by Vadim Ogievetsky
- */
-article, aside, details, figure, footer, header, main, menu, nav, section, summary {
-  display: block; }
-
-progress, video {
-  display: inline-block;
-  vertical-align: baseline; }
-
-[hidden], template {
-  display: none; }
-
-b, strong {
-  font-weight: bold; }
-
-mark {
-  color: #000;
-  background: #ff0; }
-
-small {
-  font-size: 80%; }
-
-sub {
-  position: relative;
-  font-size: 75%;
-  line-height: 0;
-  vertical-align: baseline; }
-
-sub {
-  bottom: -.25em; }
-
-img {
-  border: 0; }
-
-figure {
-  margin: 1em 40px; }
-
-hr {
-  height: 0;
-  -webkit-box-sizing: content-box;
-  -moz-box-sizing: content-box;
-  box-sizing: content-box; }
-
-pre {
-  overflow: auto; }
-
-code, pre {
-  font-family: monospace, monospace;
-  font-size: 1em; }
-
-button, input, select {
-  margin: 0;
-  font: inherit;
-  color: inherit; }
-
-button {
-  overflow: visible; }
-
-button, select {
-  text-transform: none; }
-
-button, html input[type="button"], input[type="reset"], input[type="submit"] {
-  -webkit-appearance: button;
-  cursor: pointer; }
-
-button[disabled], html input[disabled] {
-  cursor: default; }
-
-button::-moz-focus-inner, input::-moz-focus-inner {
-  padding: 0;
-  border: 0; }
-
-input {
-  line-height: normal; }
-
-input[type="checkbox"], input[type="radio"] {
-  -webkit-box-sizing: border-box;
-  -moz-box-sizing: border-box;
-  box-sizing: border-box;
-  padding: 0; }
-
-input[type="number"]::-webkit-inner-spin-button, input[type="number"]::-webkit-outer-spin-button {
-  height: auto; }
-
-input[type="search"] {
-  -webkit-box-sizing: content-box;
-  -moz-box-sizing: content-box;
-  box-sizing: content-box;
-  -webkit-appearance: textfield; }
-
-input[type="search"]::-webkit-search-cancel-button, input[type="search"]::-webkit-search-decoration {
-  -webkit-appearance: none; }
-
-table {
-  border-spacing: 0;
-  border-collapse: collapse; }
-
-td, th {
-  padding: 0; }
-
-/*! Source: https://github.com/h5bp/html5-boilerplate/blob/master/src/css/main.css */
-@media print {
-  *, *:before, *:after {
-    color: #000 !important;
-    text-shadow: none !important;
-    background: transparent !important;
-    -webkit-box-shadow: none !important;
-    box-shadow: none !important; }
-  a, a:visited {
-    text-decoration: underline; }
-  a[href]:after {
-    content: " (" attr(href) ")"; }
-  a[href^="#"]:after, a[href^="javascript:"]:after {
-    content: ""; }
-  pre, blockquote {
-    border: 1px solid #999;
-    page-break-inside: avoid; }
-  thead {
-    display: table-header-group; }
-  tr, img {
-    page-break-inside: avoid; }
-  img {
-    max-width: 100% !important; }
-  p, h2, h3 {
-    orphans: 3;
-    widows: 3; }
-  h2, h3 {
-    page-break-after: avoid; }
-  .navbar {
-    display: none; }
-  .label {
-    border: 1px solid #000; }
-  .table {
-    border-collapse: collapse !important; }
-  .table td, .table th {
-    background-color: #fff !important; } }
-
-@font-face {
-  font-family: 'Glyphicons Halflings';
-  src: url("../fonts/glyphicons-halflings-regular.eot");
-  src: url("../fonts/glyphicons-halflings-regular.eot?#iefix") format("embedded-opentype"), url("../fonts/glyphicons-halflings-regular.woff2") format("woff2"), url("../fonts/glyphicons-halflings-regular.woff") format("woff"), url("../fonts/glyphicons-halflings-regular.ttf") format("truetype"), url("../fonts/glyphicons-halflings-regular.svg#glyphicons_halflingsregular") format("svg"); }
-
-* {
-  -webkit-box-sizing: border-box;
-  -moz-box-sizing: border-box;
-  box-sizing: border-box; }
-
-*:before, *:after {
-  -webkit-box-sizing: border-box;
-  -moz-box-sizing: border-box;
-  box-sizing: border-box; }
-
-input, button, select {
-  font-family: inherit;
-  font-size: inherit;
-  line-height: inherit; }
-
-figure {
-  margin: 0; }
-
-.img-rounded {
-  border-radius: 6px; }
-
-.img-circle {
-  border-radius: 50%; }
-
-hr {
-  margin-top: 20px;
-  margin-bottom: 20px;
-  border: 0;
-  border-top: 1px solid #eee; }
-
-.sr-only {
-  position: absolute;
-  width: 1px;
-  height: 1px;
-  padding: 0;
-  margin: -1px;
-  overflow: hidden;
-  clip: rect(0, 0, 0, 0);
-  border: 0; }
-
-[role="button"] {
-  cursor: pointer; }
-
-.lead {
-  margin-bottom: 20px;
-  font-size: 16px;
-  font-weight: 300;
-  line-height: 1.4; }
-
-@media (min-width: 768px) {
-  .lead {
-    font-size: 21px; } }
-
-small, .small {
-  font-size: 85%; }
-
-mark, .mark {
-  padding: .2em;
-  background-color: #fcf8e3; }
-
-.text-left {
-  text-align: left; }
-
-.text-right {
-  text-align: right; }
-
-.text-center {
-  text-align: center; }
-
-.text-lowercase {
-  text-transform: lowercase; }
-
-.text-uppercase {
-  text-transform: uppercase; }
-
-.text-muted {
-  color: #777; }
-
-.text-primary {
-  color: #337ab7; }
-
-a.text-primary:hover, a.text-primary:focus {
-  color: #286090; }
-
-.text-success {
-  color: #3c763d; }
-
-a.text-success:hover, a.text-success:focus {
-  color: #2b542c; }
-
-.text-info {
-  color: #31708f; }
-
-a.text-info:hover, a.text-info:focus {
-  color: #245269; }
-
-.text-warning {
-  color: #8a6d3b; }
-
-a.text-warning:hover, a.text-warning:focus {
-  color: #66512c; }
-
-.text-danger {
-  color: #a94442; }
-
-a.text-danger:hover, a.text-danger:focus {
-  color: #843534; }
-
-.bg-primary {
-  color: #fff;
-  background-color: #337ab7; }
-
-a.bg-primary:hover, a.bg-primary:focus {
-  background-color: #286090; }
-
-.bg-success {
-  background-color: #dff0d8; }
-
-a.bg-success:hover, a.bg-success:focus {
-  background-color: #c1e2b3; }
-
-.bg-info {
-  background-color: #d9edf7; }
-
-a.bg-info:hover, a.bg-info:focus {
-  background-color: #afd9ee; }
-
-.bg-warning {
-  background-color: #fcf8e3; }
-
-a.bg-warning:hover, a.bg-warning:focus {
-  background-color: #f7ecb5; }
-
-.bg-danger {
-  background-color: #f2dede; }
-
-a.bg-danger:hover, a.bg-danger:focus {
-  background-color: #e4b9b9; }
-
-.page-header {
-  padding-bottom: 9px;
-  margin: 40px 0 20px;
-  border-bottom: 1px solid #eee; }
-
-ul, ol {
-  margin-top: 0;
-  margin-bottom: 10px; }
-
-ul ul, ol ul, ul ol, ol ol {
-  margin-bottom: 0; }
-
-.list-inline {
-  padding-left: 0;
-  margin-left: -5px;
-  list-style: none; }
-
-.list-inline > li {
-  display: inline-block;
-  padding-right: 5px;
-  padding-left: 5px; }
-
-dl {
-  margin-top: 0;
-  margin-bottom: 20px; }
-
-dt, dd {
-  line-height: 1.42857143; }
-
-dt {
-  font-weight: bold; }
-
-dd {
-  margin-left: 0; }
-
-@media (min-width: 768px) {
-  .dl-horizontal dt {
-    float: left;
-    width: 160px;
-    overflow: hidden;
-    clear: left;
-    text-align: right;
-    text-overflow: ellipsis;
-    white-space: nowrap; }
-  .dl-horizontal dd {
-    margin-left: 180px; } }
-
-blockquote {
-  padding: 10px 20px;
-  margin: 0 0 20px;
-  font-size: 17.5px;
-  border-left: 5px solid #eee; }
-
-blockquote p:last-child, blockquote ul:last-child, blockquote ol:last-child {
-  margin-bottom: 0; }
-
-blockquote footer, blockquote small, blockquote .small {
-  display: block;
-  font-size: 80%;
-  line-height: 1.42857143;
-  color: #777; }
-
-blockquote footer:before, blockquote small:before, blockquote .small:before {
-  content: '\2014 \00A0'; }
-
-.blockquote-reverse, blockquote.pull-right {
-  padding-right: 15px;
-  padding-left: 0;
-  text-align: right;
-  border-right: 5px solid #eee;
-  border-left: 0; }
-
-.blockquote-reverse footer:before, blockquote.pull-right footer:before, .blockquote-reverse small:before, blockquote.pull-right small:before, .blockquote-reverse .small:before, blockquote.pull-right .small:before {
-  content: ''; }
-
-.blockquote-reverse footer:after, blockquote.pull-right footer:after, .blockquote-reverse small:after, blockquote.pull-right small:after, .blockquote-reverse .small:after, blockquote.pull-right .small:after {
-  content: '\00A0 \2014'; }
-
-address {
-  margin-bottom: 20px;
-  font-style: normal;
-  line-height: 1.42857143; }
-
-code, pre {
-  font-family: Menlo, Monaco, Consolas, "Courier New", monospace; }
-
-code {
-  padding: 2px 4px;
-  font-size: 90%;
-  color: #c7254e;
-  background-color: #f9f2f4;
-  border-radius: 4px; }
-
-pre {
-  display: block;
-  padding: 9.5px;
-  margin: 0 0 10px;
-  font-size: 13px;
-  line-height: 1.42857143;
-  color: #333;
-  word-break: break-all;
-  word-wrap: break-word;
-  background-color: #f5f5f5;
-  border: 1px solid #ccc;
-  border-radius: 4px; }
-
-pre code {
-  padding: 0;
-  font-size: inherit;
-  color: inherit;
-  white-space: pre-wrap;
-  background-color: transparent;
-  border-radius: 0; }
-
-.container {
-  padding-right: 15px;
-  padding-left: 15px;
-  margin-right: auto;
-  margin-left: auto; }
-
-@media (min-width: 768px) {
-  .container {
-    width: 750px; } }
-
-@media (min-width: 992px) {
-  .container {
-    width: 970px; } }
-
-@media (min-width: 1200px) {
-  .container {
-    width: 1170px; } }
-
-.container-fluid {
-  padding-right: 15px;
-  padding-left: 15px;
-  margin-right: auto;
-  margin-left: auto; }
-
-.row {
-  margin-right: -15px;
-  margin-left: -15px; }
-
-.col-xs-1, .col-sm-1, .col-md-1, .col-lg-1, .col-xs-2, .col-sm-2, .col-md-2, .col-lg-2, .col-xs-3, .col-sm-3, .col-md-3, .col-lg-3, .col-xs-4, .col-sm-4, .col-md-4, .col-lg-4, .col-xs-5, .col-sm-5, .col-md-5, .col-lg-5, .col-xs-6, .col-sm-6, .col-md-6, .col-lg-6, .col-xs-7, .col-sm-7, .col-md-7, .col-lg-7, .col-xs-8, .col-sm-8, .col-md-8, .col-lg-8, .col-xs-9, .col-sm-9, .col-md-9, .col-lg-9, .col-xs-10, .col-sm-10, .col-md-10, .col-lg-10, .col-xs-11, .col-sm-11, .col-md-11, .col-lg-11,  [...]
-  position: relative;
-  min-height: 1px;
-  padding-right: 15px;
-  padding-left: 15px; }
-
-.col-xs-1, .col-xs-2, .col-xs-3, .col-xs-4, .col-xs-5, .col-xs-6, .col-xs-7, .col-xs-8, .col-xs-9, .col-xs-10, .col-xs-11, .col-xs-12 {
-  float: left; }
-
-@media (min-width: 992px) {
-  .col-md-1, .col-md-2, .col-md-3, .col-md-4, .col-md-5, .col-md-6, .col-md-7, .col-md-8, .col-md-9, .col-md-10, .col-md-11, .col-md-12 {
-    float: left; }
-  .col-md-12 {
-    width: 100%; }
-  .col-md-11 {
-    width: 91.66666667%; }
-  .col-md-10 {
-    width: 83.33333333%; }
-  .col-md-9 {
-    width: 75%; }
-  .col-md-8 {
-    width: 66.66666667%; }
-  .col-md-7 {
-    width: 58.33333333%; }
-  .col-md-6 {
-    width: 50%; }
-  .col-md-5 {
-    width: 41.66666667%; }
-  .col-md-4 {
-    width: 33.33333333%; }
-  .col-md-3 {
-    width: 25%; }
-  .col-md-2 {
-    width: 16.66666667%; }
-  .col-md-1 {
-    width: 8.33333333%; }
-  .col-md-pull-12 {
-    right: 100%; }
-  .col-md-pull-11 {
-    right: 91.66666667%; }
-  .col-md-pull-10 {
-    right: 83.33333333%; }
-  .col-md-pull-9 {
-    right: 75%; }
-  .col-md-pull-8 {
-    right: 66.66666667%; }
-  .col-md-pull-7 {
-    right: 58.33333333%; }
-  .col-md-pull-6 {
-    right: 50%; }
-  .col-md-pull-5 {
-    right: 41.66666667%; }
-  .col-md-pull-4 {
-    right: 33.33333333%; }
-  .col-md-pull-3 {
-    right: 25%; }
-  .col-md-pull-2 {
-    right: 16.66666667%; }
-  .col-md-pull-1 {
-    right: 8.33333333%; }
-  .col-md-pull-0 {
-    right: auto; }
-  .col-md-push-12 {
-    left: 100%; }
-  .col-md-push-11 {
-    left: 91.66666667%; }
-  .col-md-push-10 {
-    left: 83.33333333%; }
-  .col-md-push-9 {
-    left: 75%; }
-  .col-md-push-8 {
-    left: 66.66666667%; }
-  .col-md-push-7 {
-    left: 58.33333333%; }
-  .col-md-push-6 {
-    left: 50%; }
-  .col-md-push-5 {
-    left: 41.66666667%; }
-  .col-md-push-4 {
-    left: 33.33333333%; }
-  .col-md-push-3 {
-    left: 25%; }
-  .col-md-push-2 {
-    left: 16.66666667%; }
-  .col-md-push-1 {
-    left: 8.33333333%; }
-  .col-md-push-0 {
-    left: auto; }
-  .col-md-offset-12 {
-    margin-left: 100%; }
-  .col-md-offset-11 {
-    margin-left: 91.66666667%; }
-  .col-md-offset-10 {
-    margin-left: 83.33333333%; }
-  .col-md-offset-9 {
-    margin-left: 75%; }
-  .col-md-offset-8 {
-    margin-left: 66.66666667%; }
-  .col-md-offset-7 {
-    margin-left: 58.33333333%; }
-  .col-md-offset-6 {
-    margin-left: 50%; }
-  .col-md-offset-5 {
-    margin-left: 41.66666667%; }
-  .col-md-offset-4 {
-    margin-left: 33.33333333%; }
-  .col-md-offset-3 {
-    margin-left: 25%; }
-  .col-md-offset-2 {
-    margin-left: 16.66666667%; }
-  .col-md-offset-1 {
-    margin-left: 8.33333333%; }
-  .col-md-offset-0 {
-    margin-left: 0; } }
-
-table {
-  background-color: transparent; }
-
-caption {
-  padding-top: 8px;
-  padding-bottom: 8px;
-  color: #777;
-  text-align: left; }
-
-th {
-  text-align: left; }
-
-.table {
-  width: 100%;
-  max-width: 100%;
-  margin-bottom: 20px; }
-
-.table > thead > tr > th, .table > tbody > tr > th, .table > thead > tr > td, .table > tbody > tr > td {
-  padding: 8px;
-  line-height: 1.42857143;
-  vertical-align: top;
-  border-top: 1px solid #ddd; }
-
-.table > thead > tr > th {
-  vertical-align: bottom;
-  border-bottom: 2px solid #ddd; }
-
-.table > caption + thead > tr:first-child > th, .table > thead:first-child > tr:first-child > th, .table > caption + thead > tr:first-child > td, .table > thead:first-child > tr:first-child > td {
-  border-top: 0; }
-
-.table > tbody + tbody {
-  border-top: 2px solid #ddd; }
-
-.table .table {
-  background-color: #fff; }
-
-.table-condensed > thead > tr > th, .table-condensed > tbody > tr > th, .table-condensed > thead > tr > td, .table-condensed > tbody > tr > td {
-  padding: 5px; }
-
-table col[class*="col-"] {
-  position: static;
-  display: table-column;
-  float: none; }
-
-table td[class*="col-"], table th[class*="col-"] {
-  position: static;
-  display: table-cell;
-  float: none; }
-
-.table > thead > tr > td.active, .table > tbody > tr > td.active, .table > thead > tr > th.active, .table > tbody > tr > th.active, .table > thead > tr.active > td, .table > tbody > tr.active > td, .table > thead > tr.active > th, .table > tbody > tr.active > th {
-  background-color: #f5f5f5; }
-
-.table > thead > tr > td.success, .table > tbody > tr > td.success, .table > thead > tr > th.success, .table > tbody > tr > th.success, .table > thead > tr.success > td, .table > tbody > tr.success > td, .table > thead > tr.success > th, .table > tbody > tr.success > th {
-  background-color: #dff0d8; }
-
-.table > thead > tr > td.info, .table > tbody > tr > td.info, .table > thead > tr > th.info, .table > tbody > tr > th.info, .table > thead > tr.info > td, .table > tbody > tr.info > td, .table > thead > tr.info > th, .table > tbody > tr.info > th {
-  background-color: #d9edf7; }
-
-.table > thead > tr > td.warning, .table > tbody > tr > td.warning, .table > thead > tr > th.warning, .table > tbody > tr > th.warning, .table > thead > tr.warning > td, .table > tbody > tr.warning > td, .table > thead > tr.warning > th, .table > tbody > tr.warning > th {
-  background-color: #fcf8e3; }
-
-.table > thead > tr > td.danger, .table > tbody > tr > td.danger, .table > thead > tr > th.danger, .table > tbody > tr > th.danger, .table > thead > tr.danger > td, .table > tbody > tr.danger > td, .table > thead > tr.danger > th, .table > tbody > tr.danger > th {
-  background-color: #f2dede; }
-
-label {
-  display: inline-block;
-  max-width: 100%;
-  margin-bottom: 5px;
-  font-weight: bold; }
-
-input[type="search"] {
-  -webkit-box-sizing: border-box;
-  -moz-box-sizing: border-box;
-  box-sizing: border-box; }
-
-input[type="radio"], input[type="checkbox"] {
-  margin: 4px 0 0;
-  margin-top: 1px \9;
-  line-height: normal; }
-
-input[type="file"] {
-  display: block; }
-
-input[type="range"] {
-  display: block;
-  width: 100%; }
-
-select[multiple], select[size] {
-  height: auto; }
-
-input[type="file"]:focus, input[type="radio"]:focus, input[type="checkbox"]:focus {
-  outline: thin dotted;
-  outline: 5px auto -webkit-focus-ring-color;
-  outline-offset: -2px; }
-
-output {
-  display: block;
-  padding-top: 7px;
-  font-size: 14px;
-  line-height: 1.42857143;
-  color: #555; }
-
-input[type="search"] {
-  -webkit-appearance: none; }
-
-@media screen and (-webkit-min-device-pixel-ratio: 0) {
-  input[type="date"].form-control, input[type="time"].form-control, input[type="datetime-local"].form-control, input[type="month"].form-control {
-    line-height: 34px; }
-  input[type="date"].input-sm, input[type="time"].input-sm, input[type="datetime-local"].input-sm, input[type="month"].input-sm, .input-group-sm input[type="date"], .input-group-sm input[type="time"], .input-group-sm input[type="datetime-local"], .input-group-sm input[type="month"] {
-    line-height: 30px; }
-  input[type="date"].input-lg, input[type="time"].input-lg, input[type="datetime-local"].input-lg, input[type="month"].input-lg, .input-group-lg input[type="date"], .input-group-lg input[type="time"], .input-group-lg input[type="datetime-local"], .input-group-lg input[type="month"] {
-    line-height: 46px; } }
-
-.form-group {
-  margin-bottom: 15px; }
-
-input[type="radio"][disabled], input[type="checkbox"][disabled], input[type="radio"].disabled, input[type="checkbox"].disabled {
-  cursor: not-allowed; }
-
-.form-control-static {
-  min-height: 34px;
-  padding-top: 7px;
-  padding-bottom: 7px;
-  margin-bottom: 0; }
-
-.form-control-static.input-lg, .form-control-static.input-sm {
-  padding-right: 0;
-  padding-left: 0; }
-
-.input-sm {
-  height: 30px;
-  padding: 5px 10px;
-  font-size: 12px;
-  line-height: 1.5;
-  border-radius: 3px; }
-
-select.input-sm {
-  height: 30px;
-  line-height: 30px; }
-
-select[multiple].input-sm {
-  height: auto; }
-
-.input-lg {
-  height: 46px;
-  padding: 10px 16px;
-  font-size: 18px;
-  line-height: 1.3333333;
-  border-radius: 6px; }
-
-select.input-lg {
-  height: 46px;
-  line-height: 46px; }
-
-select[multiple].input-lg {
-  height: auto; }
-
-.has-feedback {
-  position: relative; }
-
-.has-feedback .form-control {
-  padding-right: 42.5px; }
-
-.form-control-feedback {
-  position: absolute;
-  top: 0;
-  right: 0;
-  z-index: 2;
-  display: block;
-  width: 34px;
-  height: 34px;
-  line-height: 34px;
-  text-align: center;
-  pointer-events: none; }
-
-.input-lg + .form-control-feedback, .input-group-lg + .form-control-feedback, .form-group-lg .form-control + .form-control-feedback {
-  width: 46px;
-  height: 46px;
-  line-height: 46px; }
-
-.input-sm + .form-control-feedback, .input-group-sm + .form-control-feedback, .form-group-sm .form-control + .form-control-feedback {
-  width: 30px;
-  height: 30px;
-  line-height: 30px; }
-
-.has-success .help-block, .has-success .control-label {
-  color: #3c763d; }
-
-.has-success .form-control {
-  border-color: #3c763d;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); }
-
-.has-success .form-control:focus {
-  border-color: #2b542c;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #67b168;
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #67b168; }
-
-.has-success .form-control-feedback {
-  color: #3c763d; }
-
-.has-warning .help-block, .has-warning .control-label {
-  color: #8a6d3b; }
-
-.has-warning .form-control {
-  border-color: #8a6d3b;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); }
-
-.has-warning .form-control:focus {
-  border-color: #66512c;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #c0a16b;
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #c0a16b; }
-
-.has-warning .form-control-feedback {
-  color: #8a6d3b; }
-
-.has-error .help-block, .has-error .control-label {
-  color: #a94442; }
-
-.has-error .form-control {
-  border-color: #a94442;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075);
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075); }
-
-.has-error .form-control:focus {
-  border-color: #843534;
-  -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #ce8483;
-  box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 6px #ce8483; }
-
-.has-error .form-control-feedback {
-  color: #a94442; }
-
-.has-feedback label ~ .form-control-feedback {
-  top: 25px; }
-
-.has-feedback label.sr-only ~ .form-control-feedback {
-  top: 0; }
-
-.help-block {
-  display: block;
-  margin-top: 5px;
-  margin-bottom: 10px;
-  color: #737373; }
-
-@media (min-width: 768px) {
-  .form-inline .form-group {
-    display: inline-block;
-    margin-bottom: 0;
-    vertical-align: middle; }
-  .form-inline .form-control {
-    display: inline-block;
-    width: auto;
-    vertical-align: middle; }
-  .form-inline .form-control-static {
-    display: inline-block; }
-  .form-inline .input-group {
-    display: inline-table;
-    vertical-align: middle; }
-  .form-inline .input-group .input-group-btn, .form-inline .input-group .form-control {
-    width: auto; }
-  .form-inline .input-group > .form-control {
-    width: 100%; }
-  .form-inline .control-label {
-    margin-bottom: 0;
-    vertical-align: middle; }
-  .form-inline .has-feedback .form-control-feedback {
-    top: 0; } }
-
-.form-horizontal .form-group {
-  margin-right: -15px;
-  margin-left: -15px; }
-
-@media (min-width: 768px) {
-  .form-horizontal .control-label {
-    padding-top: 7px;
-    margin-bottom: 0;
-    text-align: right; } }
-
-.form-horizontal .has-feedback .form-control-feedback {
-  right: 15px; }
-
-@media (min-width: 768px) {
-  .form-horizontal .form-group-lg .control-label {
-    padding-top: 14.333333px;
-    font-size: 18px; } }
-
-@media (min-width: 768px) {
-  .form-horizontal .form-group-sm .control-label {
-    padding-top: 6px;
-    font-size: 12px; } }
-
-.btn {
-  display: inline-block;
-  padding: 6px 12px;
-  margin-bottom: 0;
-  font-size: 14px;
-  font-weight: normal;
-  line-height: 1.42857143;
-  text-align: center;
-  white-space: nowrap;
-  vertical-align: middle;
-  -ms-touch-action: manipulation;
-  touch-action: manipulation;
-  cursor: pointer;
-  -webkit-user-select: none;
-  -moz-user-select: none;
-  -ms-user-select: none;
-  user-select: none;
-  background-image: none;
-  border: 1px solid transparent;
-  border-radius: 4px; }
-
-.btn:focus, .btn:active:focus, .btn.active:focus, .btn.focus, .btn:active.focus, .btn.active.focus {
-  outline: thin dotted;
-  outline: 5px auto -webkit-focus-ring-color;
-  outline-offset: -2px; }
-
-.btn:hover, .btn:focus, .btn.focus {
-  color: #333;
-  text-decoration: none; }
-
-.btn:active, .btn.active {
-  background-image: none;
-  outline: 0;
-  -webkit-box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125);
-  box-shadow: inset 0 3px 5px rgba(0, 0, 0, 0.125); }
-
-.btn.disabled, .btn[disabled] {
-  cursor: not-allowed;
-  filter: alpha(opacity=65);
-  -webkit-box-shadow: none;
-  box-shadow: none;
-  opacity: .65; }
-
-a.btn.disabled {
-  pointer-events: none; }
-
-.btn-default {
-  color: #333;
-  background-color: #fff;
-  border-color: #ccc; }
-
-.btn-default:focus, .btn-default.focus {
-  color: #333;
-  background-color: #e6e6e6;
-  border-color: #8c8c8c; }
-
-.btn-default:hover {
-  color: #333;
-  background-color: #e6e6e6;
-  border-color: #adadad; }
-
-.btn-default:active, .btn-default.active {
-  color: #333;
-  background-color: #e6e6e6;
-  border-color: #adadad; }
-
-.btn-default:active:hover, .btn-default.active:hover, .btn-default:active:focus, .btn-default.active:focus, .btn-default:active.focus, .btn-default.active.focus {
-  color: #333;
-  background-color: #d4d4d4;
-  border-color: #8c8c8c; }
-
-.btn-default:active, .btn-default.active {
-  background-image: none; }
-
-.btn-default.disabled, .btn-default[disabled], .btn-default.disabled:hover, .btn-default[disabled]:hover, .btn-default.disabled:focus, .btn-default[disabled]:focus, .btn-default.disabled.focus, .btn-default[disabled].focus, .btn-default.disabled:active, .btn-default[disabled]:active, .btn-default.disabled.active, .btn-default[disabled].active {
-  background-color: #fff;
-  border-color: #ccc; }
-
-.btn-primary {
-  color: #fff;
-  background-color: #337ab7;
-  border-color: #2e6da4; }
-
-.btn-primary:focus, .btn-primary.focus {
-  color: #fff;
-  background-color: #286090;
-  border-color: #122b40; }
-
-.btn-primary:hover {
-  color: #fff;
-  background-color: #286090;
-  border-color: #204d74; }
-
-.btn-primary:active, .btn-primary.active {
-  color: #fff;
-  background-color: #286090;
-  border-color: #204d74; }
-
-.btn-primary:active:hover, .btn-primary.active:hover, .btn-primary:active:focus, .btn-primary.active:focus, .btn-primary:active.focus, .btn-primary.active.focus {
-  color: #fff;
-  background-color: #204d74;
-  border-color: #122b40; }
-
-.btn-primary:active, .btn-primary.active {
-  background-image: none; }
-
-.btn-primary.disabled, .btn-primary[disabled], .btn-primary.disabled:hover, .btn-primary[disabled]:hover, .btn-primary.disabled:focus, .btn-primary[disabled]:focus, .btn-primary.disabled.focus, .btn-primary[disabled].focus, .btn-primary.disabled:active, .btn-primary[disabled]:active, .btn-primary.disabled.active, .btn-primary[disabled].active {
-  background-color: #337ab7;
-  border-color: #2e6da4; }
-
-.btn-success {
-  color: #fff;
-  background-color: #5cb85c;
-  border-color: #4cae4c; }
-
-.btn-success:focus, .btn-success.focus {
-  color: #fff;
-  background-color: #449d44;
-  border-color: #255625; }
-
-.btn-success:hover {
-  color: #fff;
-  background-color: #449d44;
-  border-color: #398439; }
-
-.btn-success:active, .btn-success.active {
-  color: #fff;
-  background-color: #449d44;
-  border-color: #398439; }
-
-.btn-success:active:hover, .btn-success.active:hover, .btn-success:active:focus, .btn-success.active:focus, .btn-success:active.focus, .btn-success.active.focus {
-  color: #fff;
-  background-color: #398439;
-  border-color: #255625; }
-
-.btn-success:active, .btn-success.active {
-  background-image: none; }
-
-.btn-success.disabled, .btn-success[disabled], .btn-success.disabled:hover, .btn-success[disabled]:hover, .btn-success.disabled:focus, .btn-success[disabled]:focus, .btn-success.disabled.focus, .btn-success[disabled].focus, .btn-success.disabled:active, .btn-success[disabled]:active, .btn-success.disabled.active, .btn-success[disabled].active {
-  background-color: #5cb85c;
-  border-color: #4cae4c; }
-
-.btn-info {
-  color: #fff;
-  background-color: #5bc0de;
-  border-color: #46b8da; }
-
-.btn-info:focus, .btn-info.focus {
-  color: #fff;
-  background-color: #31b0d5;
-  border-color: #1b6d85; }
-
-.btn-info:hover {
-  color: #fff;
-  background-color: #31b0d5;
-  border-color: #269abc; }
-
-.btn-info:active, .btn-info.active {
-  color: #fff;
-  background-color: #31b0d5;
-  border-color: #269abc; }
-
-.btn-info:active:hover, .btn-info.active:hover, .btn-info:active:focus, .btn-info.active:focus, .btn-info:active.focus, .btn-info.active.focus {
-  color: #fff;
-  background-color: #269abc;
-  border-color: #1b6d85; }
-
-.btn-info:active, .btn-info.active {
-  background-image: none; }
-
-.btn-info.disabled, .btn-info[disabled], .btn-info.disabled:hover, .btn-info[disabled]:hover, .btn-info.disabled:focus, .btn-info[disabled]:focus, .btn-info.disabled.focus, .btn-info[disabled].focus, .btn-info.disabled:active, .btn-info[disabled]:active, .btn-info.disabled.active, .btn-info[disabled].active {
-  background-color: #5bc0de;
-  border-color: #46b8da; }
-
-.btn-warning {
-  color: #fff;
-  background-color: #f0ad4e;
-  border-color: #eea236; }
-
-.btn-warning:focus, .btn-warning.focus {
-  color: #fff;
-  background-color: #ec971f;
-  border-color: #985f0d; }
-
-.btn-warning:hover {
-  color: #fff;
-  background-color: #ec971f;
-  border-color: #d58512; }
-
-.btn-warning:active, .btn-warning.active {
-  color: #fff;
-  background-color: #ec971f;
-  border-color: #d58512; }
-
-.btn-warning:active:hover, .btn-warning.active:hover, .btn-warning:active:focus, .btn-warning.active:focus, .btn-warning:active.focus, .btn-warning.active.focus {
-  color: #fff;
-  background-color: #d58512;
-  border-color: #985f0d; }
-
-.btn-warning:active, .btn-warning.active {
-  background-image: none; }
-
-.btn-warning.disabled, .btn-warning[disabled], .btn-warning.disabled:hover, .btn-warning[disabled]:hover, .btn-warning.disabled:focus, .btn-warning[disabled]:focus, .btn-warning.disabled.focus, .btn-warning[disabled].focus, .btn-warning.disabled:active, .btn-warning[disabled]:active, .btn-warning.disabled.active, .btn-warning[disabled].active {
-  background-color: #f0ad4e;
-  border-color: #eea236; }
-
-.btn-danger {
-  color: #fff;
-  background-color: #d9534f;
-  border-color: #d43f3a; }
-
-.btn-danger:focus, .btn-danger.focus {
-  color: #fff;
-  background-color: #c9302c;
-  border-color: #761c19; }
-
-.btn-danger:hover {
-  color: #fff;
-  background-color: #c9302c;
-  border-color: #ac2925; }
-
-.btn-danger:active, .btn-danger.active {
-  color: #fff;
-  background-color: #c9302c;
-  border-color: #ac2925; }
-
-.btn-danger:active:hover, .btn-danger.active:hover, .btn-danger:active:focus, .btn-danger.active:focus, .btn-danger:active.focus, .btn-danger.active.focus {
-  color: #fff;
-  background-color: #ac2925;
-  border-color: #761c19; }
-
-.btn-danger:active, .btn-danger.active {
-  background-image: none; }
-
-.btn-danger.disabled, .btn-danger[disabled], .btn-danger.disabled:hover, .btn-danger[disabled]:hover, .btn-danger.disabled:focus, .btn-danger[disabled]:focus, .btn-danger.disabled.focus, .btn-danger[disabled].focus, .btn-danger.disabled:active, .btn-danger[disabled]:active, .btn-danger.disabled.active, .btn-danger[disabled].active {
-  background-color: #d9534f;
-  border-color: #d43f3a; }
-
-.btn-link {
-  font-weight: normal;
-  color: #337ab7;
-  border-radius: 0; }
-
-.btn-link, .btn-link:active, .btn-link.active, .btn-link[disabled] {
-  background-color: transparent;
-  -webkit-box-shadow: none;
-  box-shadow: none; }
-
-.btn-link, .btn-link:hover, .btn-link:focus, .btn-link:active {
-  border-color: transparent; }
-
-.btn-link:hover, .btn-link:focus {
-  color: #23527c;
-  text-decoration: underline;
-  background-color: transparent; }
-
-.btn-link[disabled]:hover, .btn-link[disabled]:focus {
-  color: #777;
-  text-decoration: none; }
-
-.btn-lg, .btn-group-lg > .btn {
-  padding: 10px 16px;
-  font-size: 18px;
-  line-height: 1.3333333;
-  border-radius: 6px; }
-
-.btn-sm, .btn-group-sm > .btn {
-  padding: 5px 10px;
-  font-size: 12px;
-  line-height: 1.5;
-  border-radius: 3px; }
-
-.btn-xs, .btn-group-xs > .btn {
-  padding: 1px 5px;
-  font-size: 12px;
-  line-height: 1.5;
-  border-radius: 3px; }
-
-.btn-block {
-  display: block;
-  width: 100%; }
-
-.btn-block + .btn-block {
-  margin-top: 5px; }
-
-input[type="submit"].btn-block, input[type="reset"].btn-block, input[type="button"].btn-block {
-  width: 100%; }
-
-.collapse {
-  display: none; }
-
-.collapse.in {
-  display: block; }
-
-tr.collapse.in {
-  display: table-row; }
-
-tbody.collapse.in {
-  display: table-row-group; }
-
-.open > a {
-  outline: 0; }
-
-.btn-group {
-  position: relative;
-  display: inline-block;
-  vertical-align: middle; }
-
-.btn-group > .btn {
-  position: relative;
-  float: left; }
-
-.btn-group > .btn:hover, .btn-group > .btn:focus, .btn-group > .btn:active, .btn-group > .btn.active {
-  z-index: 2; }
-
-.btn-group .btn + .btn, .btn-group .btn + .btn-group, .btn-group .btn-group + .btn, .btn-group .btn-group + .btn-group {
-  margin-left: -1px; }
-
-.btn-group > .btn:first-child {
-  margin-left: 0; }
-
-.btn-group > .btn:last-child:not(:first-child) {
-  border-top-left-radius: 0;
-  border-bottom-left-radius: 0; }
-
-.btn-group > .btn-group {
-  float: left; }
-
-.btn-group > .btn-group:not(:first-child):not(:last-child) > .btn {
-  border-radius: 0; }
-
-.btn-group > .btn-group:first-child:not(:last-child) > .btn:last-child {
-  border-top-right-radius: 0;
-  border-bottom-right-radius: 0; }
-
-.btn-group > .btn-group:last-child:not(:first-child) > .btn:first-child {
-  border-top-left-radius: 0;
-  border-bottom-left-radius: 0; }
-
-[data-toggle="buttons"] > .btn input[type="radio"], [data-toggle="buttons"] > .btn-group > .btn input[type="radio"], [data-toggle="buttons"] > .btn input[type="checkbox"], [data-toggle="buttons"] > .btn-group > .btn input[type="checkbox"] {
-  position: absolute;
-  clip: rect(0, 0, 0, 0);
-  pointer-events: none; }
-
-.input-group {
-  position: relative;
-  display: table;
-  border-collapse: separate; }
-
-.input-group[class*="col-"] {
-  float: none;
-  padding-right: 0;
-  padding-left: 0; }
-
-.input-group .form-control {
-  position: relative;
-  z-index: 2;
-  float: left;
-  width: 100%;
-  margin-bottom: 0; }
-
-.input-group-lg > .form-control, .input-group-lg > .input-group-btn > .btn {
-  height: 46px;
-  padding: 10px 16px;
-  font-size: 18px;
-  line-height: 1.3333333;
-  border-radius: 6px; }
-
-select.input-group-lg > .form-control, select.input-group-lg > .input-group-btn > .btn {
-  height: 46px;
-  line-height: 46px; }
-
-select[multiple].input-group-lg > .form-control, select[multiple].input-group-lg > .input-group-btn > .btn {
-  height: auto; }
-
-.input-group-sm > .form-control, .input-group-sm > .input-group-btn > .btn {
-  height: 30px;
-  padding: 5px 10px;
-  font-size: 12px;
-  line-height: 1.5;
-  border-radius: 3px; }
-
-select.input-group-sm > .form-control, select.input-group-sm > .input-group-btn > .btn {
-  height: 30px;
-  line-height: 30px; }
-
-select[multiple].input-group-sm > .form-control, select[multiple].input-group-sm > .input-group-btn > .btn {
-  height: auto; }
-
-.input-group-btn, .input-group .form-control {
-  display: table-cell; }
-
-.input-group-btn:not(:first-child):not(:last-child), .input-group .form-control:not(:first-child):not(:last-child) {
-  border-radius: 0; }
-
-.input-group-btn {
-  width: 1%;
-  white-space: nowrap;
-  vertical-align: middle; }
-
-.input-group .form-control:first-child, .input-group-btn:first-child > .btn, .input-group-btn:first-child > .btn-group > .btn, .input-group-btn:last-child > .btn-group:not(:last-child) > .btn {
-  border-top-right-radius: 0;
-  border-bottom-right-radius: 0; }
-
-.input-group .form-control:last-child, .input-group-btn:last-child > .btn, .input-group-btn:last-child > .btn-group > .btn, .input-group-btn:first-child > .btn:not(:first-child), .input-group-btn:first-child > .btn-group:not(:first-child) > .btn {
-  border-top-left-radius: 0;
-  border-bottom-left-radius: 0; }
-
-.input-group-btn {
-  position: relative;
-  font-size: 0;
-  white-space: nowrap; }
-
-.input-group-btn > .btn {
-  position: relative; }
-
-.input-group-btn > .btn + .btn {
-  margin-left: -1px; }
-
-.input-group-btn > .btn:hover, .input-group-btn > .btn:focus, .input-group-btn > .btn:active {
-  z-index: 2; }
-
-.input-group-btn:first-child > .btn, .input-group-btn:first-child > .btn-group {
-  margin-right: -1px; }
-
-.input-group-btn:last-child > .btn, .input-group-btn:last-child > .btn-group {
-  z-index: 2;
-  margin-left: -1px; }
-
-.nav {
-  padding-left: 0;
-  margin-bottom: 0;
-  list-style: none; }
-
-.nav > li {
-  position: relative;
-  display: block; }
-
-.nav > li > a {
-  position: relative;
-  display: block;
-  padding: 10px 15px; }
-
-.nav > li > a:hover, .nav > li > a:focus {
-  text-decoration: none;
-  background-color: #eee; }
-
-.nav > li.disabled > a {
-  color: #777; }
-
-.nav > li.disabled > a:hover, .nav > li.disabled > a:focus {
-  color: #777;
-  text-decoration: none;
-  cursor: not-allowed;
-  background-color: transparent; }
-
-.nav .open > a, .nav .open > a:hover, .nav .open > a:focus {
-  background-color: #eee;
-  border-color: #337ab7; }
-
-.nav > li > a > img {
-  max-width: none; }
-
-.tab-content > .active {
-  display: block; }
-
-.navbar {
-  position: relative;
-  min-height: 50px;
-  margin-bottom: 20px;
-  border: 1px solid transparent; }
-
-@media (min-width: 768px) {
-  .navbar {
-    border-radius: 4px; } }
-
-@media (min-width: 768px) {
-  .navbar-header {
-    float: left; } }
-
-.navbar-collapse {
-  padding-right: 15px;
-  padding-left: 15px;
-  overflow-x: visible;
-  -webkit-overflow-scrolling: touch;
-  border-top: 1px solid transparent;
-  -webkit-box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1);
-  box-shadow: inset 0 1px 0 rgba(255, 255, 255, 0.1); }
-
-.navbar-collapse.in {
-  overflow-y: auto; }
-
-@media (min-width: 768px) {
-  .navbar-collapse {
-    width: auto;
-    border-top: 0;
-    -webkit-box-shadow: none;
-    box-shadow: none; }
-  .navbar-collapse.collapse {
-    display: block !important;
-    height: auto !important;
-    padding-bottom: 0;
-    overflow: visible !important; }
-  .navbar-collapse.in {
-    overflow-y: visible; }
-  .navbar-fixed-top .navbar-collapse, .navbar-static-top .navbar-collapse, .navbar-fixed-bottom .navbar-collapse {
-    padding-right: 0;
-    padding-left: 0; } }
-
-.navbar-fixed-top .navbar-collapse, .navbar-fixed-bottom .navbar-collapse {
-  max-height: 340px; }
-
-@media (max-device-width: 480px) and (orientation: landscape) {
-  .navbar-fixed-top .navbar-collapse, .navbar-fixed-bottom .navbar-collapse {
-    max-height: 200px; } }
-
-.container > .navbar-header, .container-fluid > .navbar-header, .container > .navbar-collapse, .container-fluid > .navbar-collapse {
-  margin-right: -15px;
-  margin-left: -15px; }
-
-@media (min-width: 768px) {
-  .container > .navbar-header, .container-fluid > .navbar-header, .container > .navbar-collapse, .container-fluid > .navbar-collapse {
-    margin-right: 0;
-    margin-left: 0; } }
-
-.navbar-static-top {
-  z-index: 1000;
-  border-width: 0 0 1px; }
-
-@media (min-width: 768px) {
-  .navbar-static-top {
-    border-radius: 0; } }
-
-.navbar-fixed-top, .navbar-fixed-bottom {
-  position: fixed;
-  right: 0;
-  left: 0;
-  z-index: 1030; }
-
-@media (min-width: 768px) {
-  .navbar-fixed-top, .navbar-fixed-bottom {
-    border-radius: 0; } }
-
-.navbar-fixed-top {
-  top: 0;
-  border-width: 0 0 1px; }
-
-.navbar-fixed-bottom {
-  bottom: 0;
-  margin-bottom: 0;
-  border-width: 1px 0 0; }
-
-.navbar-brand {
-  float: left;
-  height: 50px;
-  padding: 15px 15px;
-  font-size: 18px;
-  line-height: 20px; }
-
-.navbar-brand:hover, .navbar-brand:focus {
-  text-decoration: none; }
-
-.navbar-brand > img {
-  display: block; }
-
-@media (min-width: 768px) {
-  .navbar > .container .navbar-brand, .navbar > .container-fluid .navbar-brand {
-    margin-left: -15px; } }
-
-.navbar-toggle {
-  position: relative;
-  float: right;
-  padding: 9px 10px;
-  margin-top: 8px;
-  margin-right: 15px;
-  margin-bottom: 8px;
-  background-color: transparent;
-  background-image: none;
-  border: 1px solid transparent;
-  border-radius: 4px; }
-
-.navbar-toggle:focus {
-  outline: 0; }
-
-.navbar-toggle .icon-bar {
-  display: block;
-  width: 22px;
-  height: 2px;
-  border-radius: 1px; }
-
-.navbar-toggle .icon-bar + .icon-bar {
-  margin-top: 4px; }
-
-@media (min-width: 768px) {
-  .navbar-toggle {
-    display: none; } }
-
-.navbar-nav {
-  margin: 7.5px -15px; }
-
-.navbar-nav > li > a {
-  padding-top: 10px;
-  padding-bottom: 10px;
-  line-height: 20px; }
-
-@media (min-width: 768px) {
-  .navbar-nav {
-    float: left;
-    margin: 0; }
-  .navbar-nav > li {
-    float: left; }
-  .navbar-nav > li > a {
-    padding-top: 15px;
-    padding-bottom: 15px; } }
-
-@media (min-width: 768px) {
-  .navbar-right {
-    float: right !important;
-    margin-right: -15px; }
-  .navbar-right ~ .navbar-right {
-    margin-right: 0; } }
-
-.navbar-default {
-  background-color: #f8f8f8;
-  border-color: #e7e7e7; }
-
-.navbar-default .navbar-brand {
-  color: #777; }
-
-.navbar-default .navbar-brand:hover, .navbar-default .navbar-brand:focus {
-  color: #5e5e5e;
-  background-color: transparent; }
-
-.navbar-default .navbar-text {
-  color: #777; }
-
-.navbar-default .navbar-nav > li > a {
-  color: #777; }
-
-.navbar-default .navbar-nav > li > a:hover, .navbar-default .navbar-nav > li > a:focus {
-  color: #333;
-  background-color: transparent; }
-
-.navbar-default .navbar-nav > .active > a, .navbar-default .navbar-nav > .active > a:hover, .navbar-default .navbar-nav > .active > a:focus {
-  color: #555;
-  background-color: #e7e7e7; }
-
-.navbar-default .navbar-nav > .disabled > a, .navbar-default .navbar-nav > .disabled > a:hover, .navbar-default .navbar-nav > .disabled > a:focus {
-  color: #ccc;
-  background-color: transparent; }
-
-.navbar-default .navbar-toggle {
-  border-color: #ddd; }
-
-.navbar-default .navbar-toggle:hover, .navbar-default .navbar-toggle:focus {
-  background-color: #ddd; }
-
-.navbar-default .navbar-toggle .icon-bar {
-  background-color: #888; }
-
-.navbar-default .navbar-collapse, .navbar-default .navbar-form {
-  border-color: #e7e7e7; }
-
-.navbar-default .navbar-nav > .open > a, .navbar-default .navbar-nav > .open > a:hover, .navbar-default .navbar-nav > .open > a:focus {
-  color: #555;
-  background-color: #e7e7e7; }
-
-.navbar-default .navbar-link {
-  color: #777; }
-
-.navbar-default .navbar-link:hover {
-  color: #333; }
-
-.navbar-default .btn-link {
-  color: #777; }
-
-.navbar-default .btn-link:hover, .navbar-default .btn-link:focus {
-  color: #333; }
-
-.navbar-default .btn-link[disabled]:hover, .navbar-default .btn-link[disabled]:focus {
-  color: #ccc; }
-
-.navbar-inverse {
-  background-color: #222;
-  border-color: #080808; }
-
-.navbar-inverse .navbar-brand {
-  color: #9d9d9d; }
-
-.navbar-inverse .navbar-brand:hover, .navbar-inverse .navbar-brand:focus {
-  color: #fff;
-  background-color: transparent; }
-
-.navbar-inverse .navbar-text {
-  color: #9d9d9d; }
-
-.navbar-inverse .navbar-nav > li > a {
-  color: #9d9d9d; }
-
-.navbar-inverse .navbar-nav > li > a:hover, .navbar-inverse .navbar-nav > li > a:focus {
-  color: #fff;
-  background-color: transparent; }
-
-.navbar-inverse .navbar-nav > .active > a, .navbar-inverse .navbar-nav > .active > a:hover, .navbar-inverse .navbar-nav > .active > a:focus {
-  color: #fff;
-  background-color: #080808; }
-
-.navbar-inverse .navbar-nav > .disabled > a, .navbar-inverse .navbar-nav > .disabled > a:hover, .navbar-inverse .navbar-nav > .disabled > a:focus {
-  color: #444;
-  background-color: transparent; }
-
-.navbar-inverse .navbar-toggle {
-  border-color: #333; }
-
-.navbar-inverse .navbar-toggle:hover, .navbar-inverse .navbar-toggle:focus {
-  background-color: #333; }
-
-.navbar-inverse .navbar-toggle .icon-bar {
-  background-color: #fff; }
-
-.navbar-inverse .navbar-collapse, .navbar-inverse .navbar-form {
-  border-color: #101010; }
-
-.navbar-inverse .navbar-nav > .open > a, .navbar-inverse .navbar-nav > .open > a:hover, .navbar-inverse .navbar-nav > .open > a:focus {
-  color: #fff;
-  background-color: #080808; }
-
-.navbar-inverse .navbar-link {
-  color: #9d9d9d; }
-
-.navbar-inverse .navbar-link:hover {
-  color: #fff; }
-
-.navbar-inverse .btn-link {
-  color: #9d9d9d; }
-
-.navbar-inverse .btn-link:hover, .navbar-inverse .btn-link:focus {
-  color: #fff; }
-
-.navbar-inverse .btn-link[disabled]:hover, .navbar-inverse .btn-link[disabled]:focus {
-  color: #444; }
-
-.label {
-  display: inline;
-  padding: .2em .6em .3em;
-  font-size: 75%;
-  font-weight: bold;
-  line-height: 1;
-  color: #fff;
-  text-align: center;
-  white-space: nowrap;
-  vertical-align: baseline;
-  border-radius: .25em; }
-
-a.label:hover, a.label:focus {
-  color: #fff;
-  text-decoration: none;
-  cursor: pointer; }
-
-.label:empty {
-  display: none; }
-
-.btn .label {
-  position: relative;
-  top: -1px; }
-
-.alert {
-  padding: 15px;
-  margin-bottom: 20px;
-  border: 1px solid transparent;
-  border-radius: 4px; }
-
-.alert h4 {
-  margin-top: 0;
-  color: inherit; }
-
-.alert .alert-link {
-  font-weight: bold; }
-
-.alert > p, .alert > ul {
-  margin-bottom: 0; }
-
-.alert > p + p {
-  margin-top: 5px; }
-
-.alert-success {
-  color: #3c763d;
-  background-color: #dff0d8;
-  border-color: #d6e9c6; }
-
-.alert-success hr {
-  border-top-color: #c9e2b3; }
-
-.alert-success .alert-link {
-  color: #2b542c; }
-
-.alert-info {
-  color: #31708f;
-  background-color: #d9edf7;
-  border-color: #bce8f1; }
-
-.alert-info hr {
-  border-top-color: #a6e1ec; }
-
-.alert-info .alert-link {
-  color: #245269; }
-
-.alert-warning {
-  color: #8a6d3b;
-  background-color: #fcf8e3;
-  border-color: #faebcc; }
-
-.alert-warning hr {
-  border-top-color: #f7e1b5; }
-
-.alert-warning .alert-link {
-  color: #66512c; }
-
-.alert-danger {
-  color: #a94442;
-  background-color: #f2dede;
-  border-color: #ebccd1; }
-
-.alert-danger hr {
-  border-top-color: #e4b9c0; }
-
-.alert-danger .alert-link {
-  color: #843534; }
-
-.media {
-  margin-top: 15px; }
-
-.media:first-child {
-  margin-top: 0; }
-
-.media, .media-body {
-  overflow: hidden;
-  zoom: 1; }
-
-.media-body {
-  width: 10000px; }
-
-.media-object {
-  display: block; }
-
-.media-right, .media > .pull-right {
-  padding-left: 10px; }
-
-.media-left, .media > .pull-left {
-  padding-right: 10px; }
-
-.media-left, .media-right, .media-body {
-  display: table-cell;
-  vertical-align: top; }
-
-.media-middle {
-  vertical-align: middle; }
-
-.media-bottom {
-  vertical-align: bottom; }
-
-.media-heading {
-  margin-top: 0;
-  margin-bottom: 5px; }
-
-.media-list {
-  padding-left: 0;
-  list-style: none; }
-
-.close {
-  float: right;
-  font-size: 21px;
-  font-weight: bold;
-  line-height: 1;
-  color: #000;
-  text-shadow: 0 1px 0 #fff;
-  filter: alpha(opacity=20);
-  opacity: .2; }
-
-.close:hover, .close:focus {
-  color: #000;
-  text-decoration: none;
-  cursor: pointer;
-  filter: alpha(opacity=50);
-  opacity: .5; }
-
-button.close {
-  -webkit-appearance: none;
-  padding: 0;
-  cursor: pointer;
-  background: transparent;
-  border: 0; }
-
-.dl-horizontal dd:before, .dl-horizontal dd:after, .container:before, .container:after, .container-fluid:before, .container-fluid:after, .row:before, .row:after, .form-horizontal .form-group:before, .form-horizontal .form-group:after, .nav:before, .nav:after, .navbar:before, .navbar:after, .navbar-header:before, .navbar-header:after, .navbar-collapse:before, .navbar-collapse:after, .panel-body:before, .panel-body:after {
-  display: table;
-  content: " "; }
-
-.dl-horizontal dd:after, .container:after, .container-fluid:after, .row:after, .form-horizontal .form-group:after, .nav:after, .navbar:after, .navbar-header:after, .navbar-collapse:after, .panel-body:after {
-  clear: both; }
-
-.center-block {
-  display: block;
-  margin-right: auto;
-  margin-left: auto; }
-
-.pull-right {
-  float: right !important; }
-
-.pull-left {
-  float: left !important; }
-
-.hide {
-  display: none !important; }
-
-.show {
-  display: block !important; }
-
-.text-hide {
-  font: 0/0 a;
-  color: transparent;
-  text-shadow: none;
-  background-color: transparent;
-  border: 0; }
-
-.hidden {
-  display: none !important; }
-
-@-ms-viewport {
-  width: device-width; }
-
-.visible-xs, .visible-sm, .visible-md, .visible-lg {
-  display: none !important; }
-
-.visible-xs-block, .visible-xs-inline, .visible-xs-inline-block, .visible-sm-block, .visible-sm-inline, .visible-sm-inline-block, .visible-md-block, .visible-md-inline, .visible-md-inline-block, .visible-lg-block, .visible-lg-inline, .visible-lg-inline-block {
-  display: none !important; }
-
-@media (max-width: 767px) {
-  .visible-xs {
-    display: block !important; }
-  table.visible-xs {
-    display: table !important; }
-  tr.visible-xs {
-    display: table-row !important; }
-  th.visible-xs, td.visible-xs {
-    display: table-cell !important; } }
-
-@media (max-width: 767px) {
-  .visible-xs-block {
-    display: block !important; } }
-
-@media (max-width: 767px) {
-  .visible-xs-inline {
-    display: inline !important; } }
-
-@media (max-width: 767px) {
-  .visible-xs-inline-block {
-    display: inline-block !important; } }
-
-@media (min-width: 768px) and (max-width: 991px) {
-  .visible-sm {
-    display: block !important; }
-  table.visible-sm {
-    display: table !important; }
-  tr.visible-sm {
-    display: table-row !important; }
-  th.visible-sm, td.visible-sm {
-    display: table-cell !important; } }
-
-@media (min-width: 768px) and (max-width: 991px) {
-  .visible-sm-block {
-    display: block !important; } }
-
-@media (min-width: 768px) and (max-width: 991px) {
-  .visible-sm-inline {
-    display: inline !important; } }
-
-@media (min-width: 768px) and (max-width: 991px) {
-  .visible-sm-inline-block {
-    display: inline-block !important; } }
-
-@media (min-width: 992px) and (max-width: 1199px) {
-  .visible-md {
-    display: block !important; }
-  table.visible-md {
-    display: table !important; }
-  tr.visible-md {
-    display: table-row !important; }
-  th.visible-md, td.visible-md {
-    display: table-cell !important; } }
-
-@media (min-width: 992px) and (max-width: 1199px) {
-  .visible-md-block {
-    display: block !important; } }
-
-@media (min-width: 992px) and (max-width: 1199px) {
-  .visible-md-inline {
-    display: inline !important; } }
-
-@media (min-width: 992px) and (max-width: 1199px) {
-  .visible-md-inline-block {
-    display: inline-block !important; } }
-
-@media (max-width: 767px) {
-  .hidden-xs {
-    display: none !important; } }
-
-@media (min-width: 768px) and (max-width: 991px) {
-  .hidden-sm {
-    display: none !important; } }
-
-@media (min-width: 992px) and (max-width: 1199px) {
-  .hidden-md {
-    display: none !important; } }
-
-@media (min-width: 1200px) {
-  .hidden-lg {
-    display: none !important; } }
-
-@media print {
-  .hidden-print {
-    display: none !important; } }
diff --git a/css/docs.css b/css/docs.css
deleted file mode 100644
index 1bea588..0000000
--- a/css/docs.css
+++ /dev/null
@@ -1,126 +0,0 @@
-.doc-container {
-  padding-top: 28px; }
-
-.doc-content pre, .doc-content pre code {
-  overflow: auto;
-  white-space: pre;
-  word-wrap: normal; }
-
-.doc-content img {
-  max-width: 847.5px; }
-
-.doc-content code {
-  background-color: #e0e0e0; }
-
-.doc-content pre code {
-  background-color: transparent; }
-
-.doc-content table,
-.doc-content pre {
-  margin: 35px 0 35px 0; }
-
-.doc-content table,
-.doc-content table > thead > tr > th,
-.doc-content table > tbody > tr > th,
-.doc-content table > tfoot > tr > th,
-.doc-content table > thead > tr > td,
-.doc-content table > tbody > tr > td,
-.doc-content table > tfoot > tr > td {
-  border: 1px solid #dddddd; }
-
-.doc-content table > thead > tr > th,
-.doc-content table > thead > tr > td {
-  border-bottom-width: 2px; }
-
-.doc-content table > tbody > tr:nth-child(odd) > td,
-.doc-content table > tbody > tr:nth-child(odd) > th {
-  background-color: #f9f9f9; }
-
-.doc-content table > tbody > tr:hover > td,
-.doc-content table > tbody > tr:hover > th,
-.doc-content table > tbody > tr:focus > td,
-.doc-content table > tbody > tr:focus > th {
-  background-color: #d5d5d5; }
-
-.doc-content table code {
-  background-color: transparent; }
-
-.doc-content td, .doc-content th {
-  padding: 5px; }
-
-.doc-content .note {
-  position: relative;
-  display: block;
-  padding: 10px 14px 10px 42px;
-  margin: 35px 0 35px 0;
-  background: light-gray;
-  border-radius: 3px;
-  line-height: 170%; }
-  .doc-content .note:after {
-    content: '';
-    position: absolute;
-    top: 2px;
-    left: 0;
-    bottom: 0;
-    width: 42px;
-    background-position: 10px 9px;
-    background-size: 22px 22px;
-    background-repeat: no-repeat; }
-  .doc-content .note.info {
-    background: rgba(51, 200, 208, 0.26);
-    border-left: 2px solid rgba(51, 200, 208, 0.5); }
-    .doc-content .note.info:after {
-      background-image: url(/img/note-info.svg); }
-  .doc-content .note.caution {
-    background: rgba(249, 169, 86, 0.26);
-    border-left: 2px solid rgba(249, 169, 86, 0.5); }
-    .doc-content .note.caution:after {
-      background-image: url(/img/note-caution.svg); }
-
-.toc h2 {
-  font-size: 28px; }
-
-.toc ul {
-  list-style: none;
-  list-style-position: inside;
-  color: #4460de;
-  line-height: 1.9em; }
-  .toc ul li {
-    color: #999999;
-    font-weight: 600;
-    list-style-type: none;
-    margin-left: 0; }
-    .toc ul li a {
-      color: #4460de;
-      display: block; }
-      .toc ul li a:hover {
-        text-decoration: underline; }
-  .toc ul ul {
-    margin-top: 0;
-    margin-bottom: 0;
-    padding-left: 20px; }
-
-.searchbox {
-  position: relative;
-  height: 26px;
-  margin-top: 17px; }
-  .searchbox .gsc-search-button, .searchbox .searchbox .gsib_b {
-    display: none; }
-  .searchbox input {
-    background: white !important;
-    width: 100% !important;
-    padding: 5px 8px !important;
-    font-size: 13px !important;
-    border: 1px solid #dfdfdf !important;
-    height: 30px !important;
-    text-indent: 0 !important;
-    border-radius: 2px !important; }
-    .searchbox input:hover {
-      border-color: #c4c4c4 !important; }
-    .searchbox input:focus {
-      border-color: #c4c4c4 !important; }
-  .searchbox form, .searchbox table, .searchbox tbody, .searchbox tr, .searchbox td, .searchbox .gsc-input-box {
-    border: none !important;
-    padding: 0 !important;
-    margin: 0 !important;
-    background: none !important; }
diff --git a/css/footer.css b/css/footer.css
deleted file mode 100644
index 4c22d96..0000000
--- a/css/footer.css
+++ /dev/null
@@ -1,29 +0,0 @@
-footer {
-  line-height: 1.4em; }
-  footer a:hover {
-    color: #4460de; }
-  footer .license {
-    margin-top: 12px;
-    font-size: 11px;
-    color: #aaa;
-    line-height: 1.4em; }
-    footer .license a:hover {
-      text-decoration: underline; }
-
-footer .license {
-  margin-top: 12px;
-  font-size: 11px;
-  color: #aaa; }
-  footer .license a:hover {
-    color: #4460de;
-    text-decoration: underline; }
-
-.druid-footer {
-  padding: 32px 0 48px 0;
-  background-color: #f3f3f3;
-  border-top: 1px solid white;
-  margin-top: 50px; }
-  .druid-footer .fa, .druid-footer .fab, .druid-footer .fas {
-    font-size: 18px;
-    margin: 6px 0;
-    color: #4460de; }
diff --git a/css/header.css b/css/header.css
deleted file mode 100644
index 0fb02ec..0000000
--- a/css/header.css
+++ /dev/null
@@ -1,110 +0,0 @@
-.top-navigator {
-  background: #1C1C26;
-  height: 54px;
-  position: fixed;
-  top: 0;
-  left: 0;
-  right: 0;
-  z-index: 100; }
-  .top-navigator .left-cont .druid-logo {
-    display: inline-block;
-    height: 54px;
-    width: 120px;
-    margin-bottom: -2px;
-    background-position: center;
-    background-image: url("/img/druid_nav.png");
-    background-size: 120px auto;
-    background-repeat: no-repeat; }
-  .top-navigator .right-cont {
-    position: absolute;
-    top: 0;
-    right: 15px; }
-    .top-navigator .right-cont ul {
-      margin: 0; }
-    .top-navigator .right-cont li {
-      line-height: 54px;
-      display: inline-block;
-      font-size: 15px;
-      margin: 0; }
-      .top-navigator .right-cont li.active a {
-        color: white; }
-      .top-navigator .right-cont li.active:after {
-        content: '';
-        position: absolute;
-        height: 2px;
-        bottom: 0;
-        left: 0;
-        right: 0; }
-      .top-navigator .right-cont li a {
-        display: block;
-        padding-left: 8px;
-        padding-right: 8px;
-        color: #9caeff; }
-        .top-navigator .right-cont li a:hover {
-          text-decoration: none;
-          color: white; }
-      .top-navigator .right-cont li.button-link {
-        margin-left: 8px; }
-        .top-navigator .right-cont li.button-link a {
-          display: inline-block;
-          height: 32px;
-          line-height: 32px;
-          margin-top: 9px;
-          background: #9caeff;
-          border-radius: 2px;
-          padding: 0 10px;
-          color: #1C1C26;
-          font-weight: 600;
-          min-width: 106px;
-          text-align: center; }
-          .top-navigator .right-cont li.button-link a:hover {
-            background: white; }
-  .top-navigator .action-button {
-    position: absolute;
-    top: 10px;
-    right: 30px;
-    padding: 8px 16px;
-    text-align: center;
-    border-radius: 2px;
-    cursor: pointer;
-    display: none;
-    color: #9caeff;
-    font-size: 18px;
-    line-height: 18px;
-    font-weight: 600; }
-    .top-navigator .action-button .fa {
-      margin-right: 6px; }
-    .top-navigator .action-button:hover {
-      color: white; }
-  .top-navigator .header-dropdown .header-dropdown-menu {
-    display: none;
-    z-index: 100;
-    position: absolute;
-    top: 54px;
-    left: 0;
-    width: 200px;
-    background: #3a3a52;
-    box-shadow: 0 3px 6px 0 rgba(0, 0, 0, 0.18);
-    overflow: visible; }
-  .top-navigator .header-dropdown:hover .header-dropdown-menu {
-    display: block; }
-
-@media (max-width: 840px) {
-  body {
-    margin-top: 0; }
-  .top-navigator {
-    height: auto;
-    min-height: 54px;
-    position: relative; }
-    .top-navigator .right-cont {
-      position: relative;
-      display: block;
-      display: none;
-      padding-bottom: 28px; }
-      .top-navigator .right-cont ul {
-        margin-left: 20px; }
-      .top-navigator .right-cont li {
-        display: block;
-        line-height: 42px; }
-    .top-navigator .action-button.menu-icon {
-      display: block; } }
diff --git a/css/index.css b/css/index.css
deleted file mode 100644
index 244ec40..0000000
--- a/css/index.css
+++ /dev/null
@@ -1,50 +0,0 @@
-.druid-masthead {
-  padding: 40px 20px 32px 20px;
-  background-color: #3b3b50;
-  text-align: center;
-  margin: 0 auto 20px;
-  margin-bottom: 14px;
-  color: white;
-  overflow: hidden;
-  background-image: url("/img/watermark-dark.png");
-  background-size: 800px auto;
-  background-repeat: no-repeat;
-  background-position: 26% -76px; }
-  .druid-masthead .button {
-    display: inline-block;
-    min-width: 155px;
-    margin: 6px;
-    font-size: 1.1em;
-    line-height: 1.4em;
-    font-weight: 600;
-    padding: 9px 12px;
-    border-radius: 2px;
-    background: #9caeff;
-    color: #1C1C26;
-    transition: background-color 0.1s ease; }
-    .druid-masthead .button .fa, .druid-masthead .button .fab {
-      font-size: 16px;
-      margin-right: 3px; }
-    .druid-masthead .button:hover, .druid-masthead .button:active {
-      background: white;
-      text-decoration: none; }
-  .druid-masthead .lead {
-    font-size: 1.7em;
-    line-height: 1.7em;
-    font-weight: 600;
-    margin-top: 8px;
-    margin-bottom: 26px;
-    max-width: 820px;
-    margin-left: auto;
-    margin-right: auto; }
-  .druid-masthead b {
-    font-weight: 700; }
-
-.bottom-news {
-  display: block; }
-
-@media (max-width: 500px) {
-  .druid-masthead .lead {
-    font-size: 1.4em; }
-  .druid-masthead .button {
-    min-width: 130px; } }
diff --git a/css/news-list.css b/css/news-list.css
deleted file mode 100644
index dfea42e..0000000
--- a/css/news-list.css
+++ /dev/null
@@ -1,63 +0,0 @@
-.item-list {
-  margin-top: 6px;
-  padding: 0 10px; }
-  .item-list h3 {
-    font-size: 22px; }
-  .item-list a {
-    display: block;
-    line-height: 1.5em; }
-    .item-list a:hover {
-      text-decoration: none; }
-      .item-list a:hover span.title {
-        text-decoration: underline; }
-  .item-list .text-muted {
-    color: #888;
-    font-weight: 400;
-    font-size: 14px;
-    line-height: 15px; }
-  .item-list .event {
-    position: relative;
-    padding-left: 42px;
-    margin-bottom: 12px; }
-  .item-list .mini-cal {
-    position: absolute;
-    top: 4px;
-    left: 0;
-    background: #4460de;
-    padding: 2px;
-    width: 32px;
-    text-align: center;
-    border-radius: 2px;
-    padding-top: 16px; }
-  .item-list .date-month {
-    position: absolute;
-    top: 0;
-    left: 0;
-    right: 0;
-    padding: 1px;
-    background: #4460de;
-    border-radius: 2px;
-    color: white;
-    font-size: 10px;
-    line-height: 14px;
-    font-weight: 600;
-    text-transform: uppercase; }
-  .item-list .date-day {
-    background: white;
-    color: #2442cb;
-    font-size: 16px;
-    font-weight: 600;
-    line-height: 19px; }
-  .item-list .btn-default {
-    border-color: #4460de;
-    background: #4460de;
-    color: white;
-    font-weight: 600;
-    display: inline-block;
-    margin-top: 10px;
-    margin-bottom: 10px;
-    padding: 3px 8px; }
-    .item-list .btn-default:hover, .item-list .btn-default:active {
-      background-color: #4460de;
-      border-color: #4460de;
-      color: #fff; }
diff --git a/css/reset.css b/css/reset.css
deleted file mode 100644
index 6d44268..0000000
--- a/css/reset.css
+++ /dev/null
@@ -1,44 +0,0 @@
-html, body, div, span, applet, object, iframe, h1, h2, h3, h4, h5, h6, p, blockquote, a, abbr, acronym, address, big, cite, code, del, dfn, em, img, ins, kbd, q, s, samp, small, strike, strong, sub, sup, tt, var, b, u, i, center, dl, dt, dd, ol, ul, li, fieldset, form, label, legend, table, caption, tbody, tfoot, thead, tr, th, td, article, aside, canvas, details, embed, figure, figcaption, footer, header, menu, nav, output, ruby, section, summary, time, mark, audio, video {
-  margin: 0;
-  padding: 0;
-  border: 0;
-  font-size: 100%;
-  font: inherit;
-  vertical-align: baseline; }
-
-article, aside, details, figcaption, figure, footer, header, menu, nav, section {
-  display: block; }
-
-body {
-  line-height: 1; }
-
-ol, ul {
-  list-style: none; }
-
-table {
-  border-collapse: collapse;
-  border-spacing: 0; }
-
-* {
-  box-sizing: border-box;
-  vertical-align: top; }
-
-body * {
-  position: relative; }
-
-a {
-  text-decoration: inherit;
-  color: inherit;
-  cursor: inherit; }
-
-div, span {
-  cursor: inherit; }
-
-text {
-  cursor: default; }
-
-button, input, textarea {
-  border: 0;
-  margin: 0; }
-  button:focus, input:focus, textarea:focus {
-    outline: none; }
diff --git a/css/syntax.css b/css/syntax.css
deleted file mode 100644
index 03b781b..0000000
--- a/css/syntax.css
+++ /dev/null
@@ -1,281 +0,0 @@
-.hll {
-  background-color: #ffffcc; }
-
-.c {
-  color: #93a1a1;
-  font-style: italic; }
-
-/* Comment */
-.err {
-  color: #dc322f; }
-
-/* Error */
-.g {
-  color: #657b83; }
-
-/* Generic */
-.k {
-  color: #859900; }
-
-/* Keyword */
-.l {
-  color: #657b83; }
-
-/* Literal */
-.n {
-  color: #586e75; }
-
-/* Name */
-.o {
-  color: #657b83; }
-
-/* Operator */
-.x {
-  color: #657b83; }
-
-/* Other */
-.p {
-  color: #657b83; }
-
-/* Punctuation */
-.cm {
-  color: #93a1a1;
-  font-style: italic; }
-
-/* Comment.Multiline */
-.cp {
-  color: #93a1a1;
-  font-style: italic; }
-
-/* Comment.Preproc */
-.c1 {
-  color: #93a1a1;
-  font-style: italic; }
-
-/* Comment.Single */
-.cs {
-  color: #93a1a1;
-  font-style: italic; }
-
-/* Comment.Special */
-.gd {
-  color: #657b83; }
-
-/* Generic.Deleted */
-.ge {
-  color: #657b83; }
-
-/* Generic.Emph */
-.gr {
-  color: #657b83; }
-
-/* Generic.Error */
-.gh {
-  color: #657b83; }
-
-/* Generic.Heading */
-.gi {
-  color: #657b83; }
-
-/* Generic.Inserted */
-.go {
-  color: #657b83; }
-
-/* Generic.Output */
-.gp {
-  color: #657b83; }
-
-/* Generic.Prompt */
-.gs {
-  color: #657b83; }
-
-/* Generic.Strong */
-.gu {
-  color: #657b83; }
-
-/* Generic.Subheading */
-.gt {
-  color: #657b83; }
-
-/* Generic.Traceback */
-.kc {
-  color: #859900; }
-
-/* Keyword.Constant */
-.kd {
-  color: #859900; }
-
-/* Keyword.Declaration */
-.kn {
-  color: #cb4b16; }
-
-/* Keyword.Namespace */
-.kp {
-  color: #cb4b16; }
-
-/* Keyword.Pseudo */
-.kr {
-  color: #859900; }
-
-/* Keyword.Reserved */
-.kt {
-  color: #859900; }
-
-/* Keyword.Type */
-.ld {
-  color: #657b83; }
-
-/* Literal.Date */
-.m {
-  color: #2aa198; }
-
-/* Literal.Number */
-.s {
-  color: #2aa198; }
-
-/* Literal.String */
-.na {
-  color: #657b83; }
-
-/* Name.Attribute */
-.nb {
-  color: #268bd2; }
-
-/* Name.Builtin */
-.nc {
-  color: #268bd2; }
-
-/* Name.Class */
-.no {
-  color: #b58900; }
-
-/* Name.Constant */
-.nd {
-  color: #cb4b16; }
-
-/* Name.Decorator */
-.ni {
-  color: #cb4b16; }
-
-/* Name.Entity */
-.ne {
-  color: #cb4b16; }
-
-/* Name.Exception */
-.nf {
-  color: #268bd2; }
-
-/* Name.Function */
-.nl {
-  color: #657b83; }
-
-/* Name.Label */
-.nn {
-  color: #b58900; }
-
-/* Name.Namespace */
-.nx {
-  color: #657b83; }
-
-/* Name.Other */
-.py {
-  color: #268bd2; }
-
-/* Name.Property */
-.nt {
-  color: #859900; }
-
-/* Name.Tag */
-.nv {
-  color: #cd4b16; }
-
-/* Name.Variable */
-.ow {
-  color: #859900; }
-
-/* Operator.Word */
-.w {
-  color: #fdf6e3; }
-
-/* Text.Whitespace */
-.mf {
-  color: #2aa198; }
-
-/* Literal.Number.Float */
-.mh {
-  color: #2aa198; }
-
-/* Literal.Number.Hex */
-.mi {
-  color: #2aa198; }
-
-/* Literal.Number.Integer */
-.mo {
-  color: #2aa198; }
-
-/* Literal.Number.Oct */
-.sb {
-  color: #2aa198; }
-
-/* Literal.String.Backtick */
-.sc {
-  color: #2aa198; }
-
-/* Literal.String.Char */
-.sd {
-  color: #2aa198; }
-
-/* Literal.String.Doc */
-.s2 {
-  color: #2aa198; }
-
-/* Literal.String.Double */
-.se {
-  color: #cb4b16; }
-
-/* Literal.String.Escape */
-.sh {
-  color: #2aa198; }
-
-/* Literal.String.Heredoc */
-.si {
-  color: #cb4b16; }
-
-/* Literal.String.Interpol */
-.sx {
-  color: #2aa198; }
-
-/* Literal.String.Other */
-.sr {
-  color: #2aa198; }
-
-/* Literal.String.Regex */
-.s1 {
-  color: #2aa198; }
-
-/* Literal.String.Single */
-.ss {
-  color: #2aa198; }
-
-/* Literal.String.Symbol */
-.bp {
-  color: #268bd2;
-  font-weight: bold; }
-
-/* Name.Builtin.Pseudo */
-.vc {
-  color: #268bd2; }
-
-/* Name.Variable.Class */
-.vg {
-  color: #268bd2; }
-
-/* Name.Variable.Global */
-.vi {
-  color: #268bd2; }
-
-/* Name.Variable.Instance */
-.il {
-  color: #2aa198; }
-
-/* Literal.Number.Integer.Long */
diff --git a/css/variables.css b/css/variables.css
deleted file mode 100644
index e69de29..0000000
diff --git a/docs/0.17.0/design/architecture.html b/docs/0.17.0/design/architecture.html
index 9c7a99d..6c49da3 100644
--- a/docs/0.17.0/design/architecture.html
+++ b/docs/0.17.0/design/architecture.html
@@ -222,7 +222,7 @@ or a Historical process.</li>
 <p>You can inspect the state of currently active segments using the Druid SQL
 <a href="/docs/0.17.0/querying/sql.html#segments-table"><code>sys.segments</code> table</a>. It includes the following flags:</p>
 <ul>
-<li><code>is_published</code>: True if segment metadata has been published to the metadata store and <code>used</code> is true.</li>
+<li><code>is_published</code>: True if segment metadata has been published to the metadata store and <code>used</code> is true.</li>
 <li><code>is_available</code>: True if the segment is currently available for querying, either on a realtime task or Historical
 process.</li>
 <li><code>is_realtime</code>: True if the segment is <em>only</em> available on realtime tasks. For datasources that use realtime ingestion,
diff --git a/docs/0.17.0/development/extensions-core/avro.html b/docs/0.17.0/development/extensions-core/avro.html
index e38aa96..dd5cbaf 100644
--- a/docs/0.17.0/development/extensions-core/avro.html
+++ b/docs/0.17.0/development/extensions-core/avro.html
@@ -77,12 +77,13 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
-<p>This Apache Druid extension enables Druid to ingest and understand the Apache Avro data format. Make sure to <a href="/docs/0.17.0/development/extensions.html#loading-extensions">include</a> <code>druid-avro-extensions</code> as an extension.</p>
-<p>The <code>druid-avro-extensions</code> provides two Avro Parsers for stream ingestion and Hadoop batch ingestion.
-See <a href="/docs/0.17.0/ingestion/data-formats.html#avro-hadoop-parser">Avro Hadoop Parser</a>
-and <a href="/docs/0.17.0/ingestion/data-formats.html#avro-stream-parser">Avro Stream Parser</a>
-for details.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/approximate-histograms.html"><span class="arrow-prev">← </span><span>Approximate Histogram aggregators</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/bloom-filter.html"><span>Bloom Filter</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"></nav></div><footer class="nav-footer druid-footer [...]
+<h2><a class="anchor" aria-hidden="true" id="avro-extension"></a><a href="#avro-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0- [...]
+<p>This Apache Druid extension enables Druid to ingest and understand the Apache Avro data format. The extension provides
+two Avro parsers, one for stream ingestion and one for Hadoop batch ingestion.
+See <a href="/docs/0.17.0/ingestion/data-formats.html#avro-hadoop-parser">Avro Hadoop Parser</a> and <a href="/docs/0.17.0/ingestion/data-formats.html#avro-stream-parser">Avro Stream Parser</a>
+for more details about how to use them in an ingestion spec.</p>
+<p>Make sure to <a href="/docs/0.17.0/development/extensions.html#loading-extensions">include</a> <code>druid-avro-extensions</code> as an extension.</p>
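+<p>For example, assuming the common runtime properties file used by the example configurations, the load list might look
+like the following (a sketch; keep whatever extensions you already load in the list):</p>
+<pre><code class="hljs"># placeholder load list; retain your existing entries
+druid.extensions.loadList=["druid-avro-extensions"]
+</code></pre>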
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/approximate-histograms.html"><span class="arrow-prev">← </span><span>Approximate Histogram aggregators</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/bloom-filter.html"><span>Bloom Filter</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#avro-extens [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/0.17.0/development/extensions-core/google.html b/docs/0.17.0/development/extensions-core/google.html
index c92e9c1..7112de3 100644
--- a/docs/0.17.0/development/extensions-core/google.html
+++ b/docs/0.17.0/development/extensions-core/google.html
@@ -77,10 +77,24 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
+<h2><a class="anchor" aria-hidden="true" id="google-cloud-storage-extension"></a><a href="#google-cloud-storage-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 1 [...]
+<p>This extension allows you to do two things:</p>
+<ul>
+<li><a href="#reading-data-from-google-cloud-storage">Ingest data</a> from files stored in Google Cloud Storage.</li>
+<li>Write segments to <a href="#deep-storage">deep storage</a> in Google Cloud Storage.</li>
+</ul>
 <p>To use this Apache Druid extension, make sure to <a href="/docs/0.17.0/development/extensions.html#loading-extensions">include</a> <code>druid-google-extensions</code> extension.</p>
-<h2><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
+<h3><a class="anchor" aria-hidden="true" id="required-configuration"></a><a href="#required-configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
+<p>To configure connectivity to Google Cloud, run Druid processes with <code>GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account_keyfile</code> set in the environment.</p>
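+<p>For example, assuming the quickstart scripts that ship with Druid, the variable could be exported before startup; the
+keyfile path is a placeholder:</p>
+<pre><code class="hljs css language-bash"># placeholder keyfile path
+export GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account_keyfile
+bin/start-micro-quickstart
+</code></pre>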
+<h3><a class="anchor" aria-hidden="true" id="reading-data-from-google-cloud-storage"></a><a href="#reading-data-from-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
+<p>The <a href="/docs/0.17.0/ingestion/native-batch.html#google-cloud-storage-input-source">Google Cloud Storage input source</a> is supported by the <a href="/docs/0.17.0/ingestion/native-batch.html#parallel-task">Parallel task</a>
+to read objects directly from Google Cloud Storage. If you use the <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop task</a>,
+you can read data from Google Cloud Storage by specifying the paths in your <a href="/docs/0.17.0/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
+<p>Objects can also be read directly from Google Cloud Storage via the <a href="/docs/0.17.0/ingestion/native-batch.html#staticgoogleblobstorefirehose">StaticGoogleBlobStoreFirehose</a>.</p>
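+<p>As a sketch, an <code>ioConfig</code> for the Parallel task reading JSON files from Google Cloud Storage might look
+like the following; the bucket and object names are placeholders:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "type": "index_parallel",
+  "inputSource": {
+    "type": "google",
+    "uris": ["gs://my-bucket/my-path/data.json"]
+  },
+  "inputFormat": { "type": "json" }
+}
+</code></pre>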
+<h3><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
 <p>Deep storage can be written to Google Cloud Storage either via this extension or the <a href="/docs/0.17.0/development/extensions-core/hdfs.html">druid-hdfs-storage extension</a>.</p>
-<h3><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
+<h4><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
+<p>To configure connectivity to Google Cloud, run Druid processes with <code>GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account_keyfile</code> set in the environment.</p>
 <table>
 <thead>
 <tr><th>Property</th><th>Possible Values</th><th>Description</th><th>Default</th></tr>
@@ -91,11 +105,7 @@
 <tr><td><code>druid.google.prefix</code></td><td></td><td>GCS prefix.</td><td>No-prefix</td></tr>
 </tbody>
 </table>
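+<p>Putting the table together, a minimal deep storage configuration might look like the following; the bucket and prefix
+are placeholders:</p>
+<pre><code class="hljs"># placeholder bucket and prefix
+druid.storage.type=google
+druid.google.bucket=my-bucket
+druid.google.prefix=druid/segments
+</code></pre>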
-<h2><a class="anchor" aria-hidden="true" id="reading-data-from-google-cloud-storage"></a><a href="#reading-data-from-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
-<p>The <a href="/docs/0.17.0/ingestion/native-batch.html#google-cloud-storage-input-source">Google Cloud Storage input source</a> is supported by the <a href="/docs/0.17.0/ingestion/native-batch.html#parallel-task">Parallel task</a>
-to read objects directly from Google Cloud Storage. If you use the <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop task</a>,
-you can read data from Google Cloud Storage by specifying the paths in your <a href="/docs/0.17.0/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/druid-lookups.html"><span class="arrow-prev">← </span><span>Cached Lookup Module</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/hdfs.html"><span>HDFS</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Deep Storage</a><ul class="toc-head [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/druid-lookups.html"><span class="arrow-prev">← </span><span>Cached Lookup Module</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/hdfs.html"><span>HDFS</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#google-cloud-storage-extension">Google Cloud Stor [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/0.17.0/development/extensions-core/hdfs.html b/docs/0.17.0/development/extensions-core/hdfs.html
index e37321b..dee124a 100644
--- a/docs/0.17.0/development/extensions-core/hdfs.html
+++ b/docs/0.17.0/development/extensions-core/hdfs.html
@@ -91,26 +91,123 @@
 <tr><td><code>druid.hadoop.security.kerberos.keytab</code></td><td><code>/etc/security/keytabs/druid.headlessUser.keytab</code></td><td>Path to keytab file</td><td>empty</td></tr>
 </tbody>
 </table>
-<p>If you are using the Hadoop indexer, set your output directory to be a location on Hadoop and it will work.
+<p>Besides the above settings, you also need to include all Hadoop configuration files (such as <code>core-site.xml</code> and <code>hdfs-site.xml</code>)
+in the Druid classpath. One way to do this is to copy those files under <code>${DRUID_HOME}/conf/_common</code>.</p>
+<p>If you are using Hadoop ingestion, set your output directory to be a location on Hadoop and it will work.
 If you want to eagerly authenticate against a secured Hadoop/HDFS cluster, you must set <code>druid.hadoop.security.kerberos.principal</code> and <code>druid.hadoop.security.kerberos.keytab</code>; this is an alternative to the cron job method that runs the <code>kinit</code> command periodically.</p>
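+<p>For example, eager authentication could be configured like the following; the principal and keytab path are
+placeholders:</p>
+<pre><code class="hljs"># placeholder principal and keytab
+druid.hadoop.security.kerberos.principal=druid@EXAMPLE.COM
+druid.hadoop.security.kerberos.keytab=/etc/security/keytabs/druid.headlessUser.keytab
+</code></pre>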
-<h3><a class="anchor" aria-hidden="true" id="configuration-for-google-cloud-storage"></a><a href="#configuration-for-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
-<p>The HDFS extension can also be used for GCS as deep storage.</p>
+<h3><a class="anchor" aria-hidden="true" id="configuration-for-cloud-storage"></a><a href="#configuration-for-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 [...]
+<p>You can also use AWS S3 or Google Cloud Storage as deep storage via HDFS.</p>
+<h4><a class="anchor" aria-hidden="true" id="configuration-for-aws-s3"></a><a href="#configuration-for-aws-s3" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-. [...]
+<p>To use AWS S3 as deep storage, you need to configure <code>druid.storage.storageDirectory</code> properly.</p>
 <table>
 <thead>
 <tr><th>Property</th><th>Possible Values</th><th>Description</th><th>Default</th></tr>
 </thead>
 <tbody>
 <tr><td><code>druid.storage.type</code></td><td>hdfs</td><td></td><td>Must be set.</td></tr>
-<tr><td><code>druid.storage.storageDirectory</code></td><td></td><td><a href="gs://bucket/example/directory">gs://bucket/example/directory</a></td><td>Must be set.</td></tr>
+<tr><td><code>druid.storage.storageDirectory</code></td><td><a href="s3a://bucket/example/directory">s3a://bucket/example/directory</a> or <a href="s3n://bucket/example/directory">s3n://bucket/example/directory</a></td><td>Path to the deep storage</td><td>Must be set.</td></tr>
 </tbody>
 </table>
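+<p>For instance, the deep storage settings might look like the following; the bucket name is a placeholder:</p>
+<pre><code class="hljs"># placeholder bucket
+druid.storage.type=hdfs
+druid.storage.storageDirectory=s3a://my-bucket/druid/segments
+</code></pre>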
-<p>All services that need to access GCS need to have the <a href="https://cloud.google.com/hadoop/google-cloud-storage-connector#manualinstallation">GCS connector jar</a> in their class path. One option is to place this jar in <druid>/lib/ and <druid>/extensions/druid-hdfs-storage/</p>
-<p>Tested with Druid 0.9.0, Hadoop 2.7.2 and gcs-connector jar 1.4.4-hadoop2.</p>
-<h2><a class="anchor" aria-hidden="true" id="reading-data-from-hdfs"></a><a href="#reading-data-from-hdfs" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
+<p>You also need to include the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a>, specifically <code>hadoop-aws.jar</code>, in the Druid classpath.
+Run the command below to install the <code>hadoop-aws.jar</code> file under <code>${DRUID_HOME}/extensions/druid-hdfs-storage</code> on all nodes.</p>
+<pre><code class="hljs css language-bash">java -classpath <span class="hljs-string">"<span class="hljs-variable">${DRUID_HOME}</span>lib/*"</span> org.apache.druid.cli.Main tools pull-deps -h <span class="hljs-string">"org.apache.hadoop:hadoop-aws:<span class="hljs-variable">${HADOOP_VERSION}</span>"</span>;
+cp <span class="hljs-variable">${DRUID_HOME}</span>/hadoop-dependencies/hadoop-aws/<span class="hljs-variable">${HADOOP_VERSION}</span>/hadoop-aws-<span class="hljs-variable">${HADOOP_VERSION}</span>.jar <span class="hljs-variable">${DRUID_HOME}</span>/extensions/druid-hdfs-storage/
+</code></pre>
+<p>Finally, you need to add the properties below to <code>core-site.xml</code>.
+For more configuration options, see the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a> documentation.</p>
+<pre><code class="hljs css language-xml"><span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.s3a.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>org.apache.hadoop.fs.s3a.S3AFileSystem<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The implementation class of the S3A Filesystem<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.AbstractFileSystem.s3a.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>org.apache.hadoop.fs.s3a.S3A<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The implementation class of the S3A AbstractFileSystem.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.s3a.access.key<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>AWS access key ID. Omit for IAM role-based or provider-based authentication.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>your access key<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.s3a.secret.key<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>AWS secret key. Omit for IAM role-based or provider-based authentication.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>your secret key<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+</code></pre>
+<h4><a class="anchor" aria-hidden="true" id="configuration-for-google-cloud-storage"></a><a href="#configuration-for-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
+<p>To use Google Cloud Storage as deep storage, you need to configure <code>druid.storage.storageDirectory</code> properly.</p>
+<table>
+<thead>
+<tr><th>Property</th><th>Possible Values</th><th>Description</th><th>Default</th></tr>
+</thead>
+<tbody>
+<tr><td><code>druid.storage.type</code></td><td>hdfs</td><td></td><td>Must be set.</td></tr>
+<tr><td><code>druid.storage.storageDirectory</code></td><td><a href="gs://bucket/example/directory">gs://bucket/example/directory</a></td><td>Path to the deep storage</td><td>Must be set.</td></tr>
+</tbody>
+</table>
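+<p>For instance, the deep storage settings might look like the following; the bucket name is a placeholder:</p>
+<pre><code class="hljs"># placeholder bucket
+druid.storage.type=hdfs
+druid.storage.storageDirectory=gs://my-bucket/druid/segments
+</code></pre>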
+<p>All services that need to access GCS must have the <a href="https://cloud.google.com/dataproc/docs/concepts/connectors/cloud-storage#other_sparkhadoop_clusters">GCS connector jar</a> in their classpath.
+Please read the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md">install instructions</a>
+to properly set up the necessary libraries and configurations.
+One option is to place this jar in both <code>${DRUID_HOME}/lib/</code> and <code>${DRUID_HOME}/extensions/druid-hdfs-storage/</code>.</p>
+<p>Finally, you need to configure the <code>core-site.xml</code> file with the filesystem
+and authentication properties needed for GCS. You may want to copy the
+example properties below. Please follow the instructions at
+<a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md">https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md</a>
+for more details.
+For more configuration options, see the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/conf/gcs-core-default.xml">GCS core default</a>
+and the <a href="https://github.com/GoogleCloudPlatform/bdutil/blob/master/conf/hadoop2/gcs-core-template.xml">GCS core template</a>.</p>
+<pre><code class="hljs css language-xml"><span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.gs.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The FileSystem for gs: (GCS) uris.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.AbstractFileSystem.gs.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The AbstractFileSystem for gs: uris.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>google.cloud.auth.service.account.enable<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>true<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>
+    Whether to use a service account for GCS authorization.
+    Setting this property to `false` will disable use of service accounts for
+    authentication.
+  <span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>google.cloud.auth.service.account.json.keyfile<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>/path/to/keyfile<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>
+    The JSON key file of the service account used for GCS
+    access when google.cloud.auth.service.account.enable is true.
+  <span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+</code></pre>
+<p>Tested with Druid 0.17.0, Hadoop 2.8.5 and gcs-connector jar 2.0.0-hadoop2.</p>
+<h2><a class="anchor" aria-hidden="true" id="reading-data-from-hdfs-or-cloud-storage"></a><a href="#reading-data-from-hdfs-or-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2  [...]
+<h3><a class="anchor" aria-hidden="true" id="native-batch-ingestion"></a><a href="#native-batch-ingestion" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
 <p>The <a href="/docs/0.17.0/ingestion/native-batch.html#hdfs-input-source">HDFS input source</a> is supported by the <a href="/docs/0.17.0/ingestion/native-batch.html#parallel-task">Parallel task</a>
-to read objects directly from HDFS Storage. If you use the <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop task</a>,
-you can read data from HDFS by specifying the paths in your <a href="/docs/0.17.0/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/google.html"><span class="arrow-prev">← </span><span>Google Cloud Storage</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/kafka-extraction-namespace.html"><span>Apache Kafka Lookups</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Deep [...]
+to read files directly from HDFS. You may be able to read objects from cloud storage
+with the HDFS input source, but we highly recommend using a dedicated
+<a href="/docs/0.17.0/ingestion/native-batch.html#input-sources">Input Source</a> instead where one exists, because
+it is simpler to set up. For now, only the <a href="/docs/0.17.0/ingestion/native-batch.html#s3-input-source">S3 input source</a>
+and the <a href="/docs/0.17.0/ingestion/native-batch.html#google-cloud-storage-input-source">Google Cloud Storage input source</a>
+are supported for cloud storage, so you may still need the HDFS input source
+to read from other cloud storage types.</p>
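+<p>As a sketch, an <code>ioConfig</code> using the HDFS input source might look like the following; the namenode address
+and path are placeholders:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "type": "index_parallel",
+  "inputSource": {
+    "type": "hdfs",
+    "paths": "hdfs://namenode:8020/druid/input"
+  },
+  "inputFormat": { "type": "json" }
+}
+</code></pre>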
+<h3><a class="anchor" aria-hidden="true" id="hadoop-based-ingestion"></a><a href="#hadoop-based-ingestion" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
+<p>If you use <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop ingestion</a>, you can read data from HDFS
+by specifying the paths in your <a href="/docs/0.17.0/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.
+See the <a href="/docs/0.17.0/ingestion/hadoop.html#static">Static</a> inputSpec for details.</p>
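+<p>For reference, a static inputSpec reading from HDFS might look like the following; the paths are placeholders:</p>
+<pre><code class="hljs css language-json">"inputSpec": {
+  "type": "static",
+  "paths": "hdfs://namenode:8020/path/to/data1,hdfs://namenode:8020/path/to/data2"
+}
+</code></pre>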
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/google.html"><span class="arrow-prev">← </span><span>Google Cloud Storage</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/kafka-extraction-namespace.html"><span>Apache Kafka Lookups</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Deep [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/0.17.0/development/extensions-core/kafka-ingestion.html b/docs/0.17.0/development/extensions-core/kafka-ingestion.html
index 89d1e5d..9b99d90 100644
--- a/docs/0.17.0/development/extensions-core/kafka-ingestion.html
+++ b/docs/0.17.0/development/extensions-core/kafka-ingestion.html
@@ -269,7 +269,7 @@ MiddleManagers. A supervisor for a dataSource is started by submitting a supervi
 </tbody>
 </table>
 <h4><a class="anchor" aria-hidden="true" id="specifying-data-format"></a><a href="#specifying-data-format" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
-<p>Kafka indexing service supports both <a href="/docs/0.17.0/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/0.17.0/ingestion/data-formats.html#parser-deprecated"><code>parser</code></a> to specify the data format.
+<p>The Kafka indexing service supports both <a href="/docs/0.17.0/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/0.17.0/ingestion/data-formats.html#parser"><code>parser</code></a> to specify the data format.
 The <code>inputFormat</code> is a new and recommended way to specify the data format for Kafka indexing service,
 but unfortunately, it doesn't support all data formats supported by the legacy <code>parser</code>.
 (They will be supported in the future.)</p>
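+<p>As an illustration, an <code>ioConfig</code> fragment using <code>inputFormat</code> might look like the following;
+the topic and bootstrap servers are placeholders:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "topic": "metrics",
+  "inputFormat": { "type": "json" },
+  "consumerProperties": { "bootstrap.servers": "localhost:9092" }
+}
+</code></pre>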
diff --git a/docs/0.17.0/development/extensions-core/kinesis-ingestion.html b/docs/0.17.0/development/extensions-core/kinesis-ingestion.html
index 4d0d202..fba320e 100644
--- a/docs/0.17.0/development/extensions-core/kinesis-ingestion.html
+++ b/docs/0.17.0/development/extensions-core/kinesis-ingestion.html
@@ -272,7 +272,7 @@ and the MiddleManagers. A supervisor for a dataSource is started by submitting a
 </tbody>
 </table>
 <h4><a class="anchor" aria-hidden="true" id="specifying-data-format"></a><a href="#specifying-data-format" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
-<p>Kinesis indexing service supports both <a href="/docs/0.17.0/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/0.17.0/ingestion/data-formats.html#parser-deprecated"><code>parser</code></a> to specify the data format.
+<p>The Kinesis indexing service supports both <a href="/docs/0.17.0/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/0.17.0/ingestion/data-formats.html#parser"><code>parser</code></a> to specify the data format.
 The <code>inputFormat</code> is a new and recommended way to specify the data format for Kinesis indexing service,
 but unfortunately, it doesn't support all data formats supported by the legacy <code>parser</code>.
 (They will be supported in the future.)</p>
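+<p>As an illustration, an <code>ioConfig</code> fragment using <code>inputFormat</code> might look like the following;
+the stream name and endpoint are placeholders:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "stream": "metrics",
+  "inputFormat": { "type": "json" },
+  "endpoint": "kinesis.us-east-1.amazonaws.com"
+}
+</code></pre>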
diff --git a/docs/0.17.0/development/extensions-core/mysql.html b/docs/0.17.0/development/extensions-core/mysql.html
index d46cb49..976b6a4 100644
--- a/docs/0.17.0/development/extensions-core/mysql.html
+++ b/docs/0.17.0/development/extensions-core/mysql.html
@@ -154,7 +154,7 @@ with the location (host name and port) of the database.</p>
 <h3><a class="anchor" aria-hidden="true" id="mysql-firehose"></a><a href="#mysql-firehose" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0- [...]
 <p>The MySQL extension provides an implementation of an <a href="/docs/0.17.0/ingestion/native-batch.html#firehoses-deprecated">SqlFirehose</a> which can be used to ingest data into Druid from a MySQL database.</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span>: {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"some_datasource"</span>,
@@ -191,7 +191,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"ioConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"firehose"</span>: {
         <span class="hljs-attr">"type"</span>: <span class="hljs-string">"sql"</span>,
         <span class="hljs-attr">"database"</span>: {
@@ -208,7 +208,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"tuningconfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>
     }
   }
 }
diff --git a/docs/0.17.0/development/extensions-core/orc.html b/docs/0.17.0/development/extensions-core/orc.html
index ba14461..69c4e47 100644
--- a/docs/0.17.0/development/extensions-core/orc.html
+++ b/docs/0.17.0/development/extensions-core/orc.html
@@ -77,12 +77,12 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
-<p>This Apache Druid module extends <a href="/docs/0.17.0/ingestion/hadoop.html">Druid Hadoop based indexing</a> to ingest data directly from offline
-Apache ORC files.</p>
-<p>To use this extension, make sure to <a href="/docs/0.17.0/development/extensions.html#loading-extensions">include</a> <code>druid-orc-extensions</code>.</p>
-<p>The <code>druid-orc-extensions</code> provides the <a href="/docs/0.17.0/ingestion/data-formats.html#orc">ORC input format</a> and the <a href="/docs/0.17.0/ingestion/data-formats.html#orc-hadoop-parser">ORC Hadoop parser</a>
+<h2><a class="anchor" aria-hidden="true" id="orc-extension"></a><a href="#orc-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
+<p>This Apache Druid extension enables Druid to ingest and understand the Apache ORC data format.</p>
+<p>The extension provides the <a href="/docs/0.17.0/ingestion/data-formats.html#orc">ORC input format</a> and the <a href="/docs/0.17.0/ingestion/data-formats.html#orc-hadoop-parser">ORC Hadoop parser</a>
 for <a href="/docs/0.17.0/ingestion/native-batch.html">native batch ingestion</a> and <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop batch ingestion</a>, respectively.
 Please see the corresponding docs for details.</p>
+<p>To use this extension, make sure to <a href="/docs/0.17.0/development/extensions.html#loading-extensions">include</a> <code>druid-orc-extensions</code>.</p>
 <h3><a class="anchor" aria-hidden="true" id="migration-from-contrib-extension"></a><a href="#migration-from-contrib-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13. [...]
 <p>This extension, first available in version 0.15.0, replaces the previous 'contrib' extension which was available until
 0.14.0-incubating. While this extension can index any data the 'contrib' extension could, the JSON spec for the
@@ -130,7 +130,7 @@ for a dimension <code>nestedData_dim1</code>, to preserve Druid schema could be
  ...
 }
 </code></pre>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/mysql.html"><span class="arrow-prev">← </span><span class="function-name-prevnext">MySQL Metadata Store</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/parquet.html"><span>Apache Parquet Extension</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"></nav></div><footer class="nav-footer drui [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/mysql.html"><span class="arrow-prev">← </span><span class="function-name-prevnext">MySQL Metadata Store</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/parquet.html"><span>Apache Parquet Extension</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#orc [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/0.17.0/development/extensions-core/postgresql.html b/docs/0.17.0/development/extensions-core/postgresql.html
index ef913dd..1e53f92 100644
--- a/docs/0.17.0/development/extensions-core/postgresql.html
+++ b/docs/0.17.0/development/extensions-core/postgresql.html
@@ -135,7 +135,7 @@ with the location (host name and port) of the database.</p>
 <h3><a class="anchor" aria-hidden="true" id="postgresql-firehose"></a><a href="#postgresql-firehose" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.2 [...]
 <p>The PostgreSQL extension provides an implementation of an <a href="/docs/0.17.0/ingestion/native-batch.html#firehoses-deprecated">SqlFirehose</a> which can be used to ingest data into Druid from a PostgreSQL database.</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span>: {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"some_datasource"</span>,
@@ -172,7 +172,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"ioConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"firehose"</span>: {
         <span class="hljs-attr">"type"</span>: <span class="hljs-string">"sql"</span>,
         <span class="hljs-attr">"database"</span>: {
@@ -189,7 +189,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"tuningconfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>
     }
   }
 }
diff --git a/docs/0.17.0/development/extensions-core/s3.html b/docs/0.17.0/development/extensions-core/s3.html
index d2ecaec..2291f45 100644
--- a/docs/0.17.0/development/extensions-core/s3.html
+++ b/docs/0.17.0/development/extensions-core/s3.html
@@ -77,50 +77,39 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
+<h2><a class="anchor" aria-hidden="true" id="s3-extension"></a><a href="#s3-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
+<p>This extension allows you to do two things:</p>
+<ul>
+<li><a href="#reading-data-from-s3">Ingest data</a> from files stored in S3.</li>
+<li>Write segments to <a href="#deep-storage">deep storage</a> in S3.</li>
+</ul>
 <p>To use this Apache Druid extension, make sure to <a href="/docs/0.17.0/development/extensions.html#loading-extensions">include</a> <code>druid-s3-extensions</code> as an extension.</p>
-<h2><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
+<h3><a class="anchor" aria-hidden="true" id="reading-data-from-s3"></a><a href="#reading-data-from-s3" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
+<p>The <a href="/docs/0.17.0/ingestion/native-batch.html#s3-input-source">S3 input source</a> is supported by the <a href="/docs/0.17.0/ingestion/native-batch.html#parallel-task">Parallel task</a>
+to read objects directly from S3. If you use the <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop task</a>,
+you can read data from S3 by specifying the S3 paths in your <a href="/docs/0.17.0/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
+<p>To read objects from S3, you must first configure how to <a href="#configuration">connect to S3</a>.</p>
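+<p>For example, a minimal sketch of a Parallel task <code>ioConfig</code> reading JSON objects from S3 (the bucket and object names are hypothetical):</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "type": "index_parallel",
+  "inputSource": {
+    "type": "s3",
+    "uris": ["s3://your-bucket/path/to/file.json"]
+  },
+  "inputFormat": {
+    "type": "json"
+  }
+}
+</code></pre>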
+<h3><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
 <p>S3-compatible deep storage means either AWS S3 or a compatible service like Google Storage, which exposes the same API as S3.</p>
-<h3><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
 <p>S3 deep storage needs to be explicitly enabled by setting <code>druid.storage.type=s3</code>. <strong>Only after setting the storage type to S3 will any of the settings below take effect.</strong></p>
-<p>The AWS SDK requires that the target region be specified. Two ways of doing this are by using the JVM system property <code>aws.region</code> or the environment variable <code>AWS_REGION</code>.</p>
-<p>As an example, to set the region to 'us-east-1' through system properties:</p>
-<ul>
-<li>Add <code>-Daws.region=us-east-1</code> to the jvm.config file for all Druid services.</li>
-<li>Add <code>-Daws.region=us-east-1</code> to <code>druid.indexer.runner.javaOpts</code> in middleManager/runtime.properties so that the property will be passed to Peon (worker) processes.</li>
-</ul>
+<p>To correctly configure this extension for deep storage in S3, first configure how to <a href="#configuration">connect to S3</a>.
+In addition, you need to set configuration specific to <a href="#deep-storage-specific-configuration">deep storage</a>.</p>
+<h4><a class="anchor" aria-hidden="true" id="deep-storage-specific-configuration"></a><a href="#deep-storage-specific-configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2 [...]
 <table>
 <thead>
 <tr><th>Property</th><th>Description</th><th>Default</th></tr>
 </thead>
 <tbody>
-<tr><td><code>druid.s3.accessKey</code></td><td>S3 access key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
-<tr><td><code>druid.s3.secretKey</code></td><td>S3 secret key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
-<tr><td><code>druid.s3.fileSessionCredentials</code></td><td>Path to properties file containing <code>sessionToken</code>, <code>accessKey</code> and <code>secretKey</code> value. One key/value pair per line (format <code>key=value</code>). See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
-<tr><td><code>druid.s3.protocol</code></td><td>Communication protocol type to use when sending requests to AWS. <code>http</code> or <code>https</code> can be used. This configuration would be ignored if <code>druid.s3.endpoint.url</code> is filled with a URL with a different protocol.</td><td><code>https</code></td></tr>
-<tr><td><code>druid.s3.disableChunkedEncoding</code></td><td>Disables chunked encoding. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#disableChunkedEncoding--">AWS document</a> for details.</td><td>false</td></tr>
-<tr><td><code>druid.s3.enablePathStyleAccess</code></td><td>Enables path style access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#enablePathStyleAccess--">AWS document</a> for details.</td><td>false</td></tr>
-<tr><td><code>druid.s3.forceGlobalBucketAccessEnabled</code></td><td>Enables global bucket access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#setForceGlobalBucketAccessEnabled-java.lang.Boolean-">AWS document</a> for details.</td><td>false</td></tr>
-<tr><td><code>druid.s3.endpoint.url</code></td><td>Service endpoint either with or without the protocol.</td><td>None</td></tr>
-<tr><td><code>druid.s3.endpoint.signingRegion</code></td><td>Region to use for SigV4 signing of requests (e.g. us-west-1).</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.host</code></td><td>Proxy host to connect through.</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.port</code></td><td>Port on the proxy host to connect through.</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.username</code></td><td>User name to use when connecting through a proxy.</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.password</code></td><td>Password to use when connecting through a proxy.</td><td>None</td></tr>
 <tr><td><code>druid.storage.bucket</code></td><td>Bucket to store in.</td><td>Must be set.</td></tr>
 <tr><td><code>druid.storage.baseKey</code></td><td>Base key prefix to use, i.e. what directory.</td><td>Must be set.</td></tr>
+<tr><td><code>druid.storage.type</code></td><td>Global deep storage provider. Must be set to <code>s3</code> to make use of this extension.</td><td>Must be set (likely <code>s3</code>).</td></tr>
 <tr><td><code>druid.storage.archiveBucket</code></td><td>S3 bucket name for archiving when running the <em>archive task</em>.</td><td>none</td></tr>
 <tr><td><code>druid.storage.archiveBaseKey</code></td><td>S3 object key prefix for archiving.</td><td>none</td></tr>
 <tr><td><code>druid.storage.disableAcl</code></td><td>Boolean flag to disable ACL. If this is set to <code>false</code>, full control will be granted to the bucket owner. This may require setting additional permissions. See <a href="#s3-permissions-settings">S3 permissions settings</a>.</td><td>false</td></tr>
-<tr><td><code>druid.storage.sse.type</code></td><td>Server-side encryption type. Should be one of <code>s3</code>, <code>kms</code>, and <code>custom</code>. See the below <a href="#server-side-encryption">Server-side encryption section</a> for more details.</td><td>None</td></tr>
-<tr><td><code>druid.storage.sse.kms.keyId</code></td><td>AWS KMS key ID. This is used only when <code>druid.storage.sse.type</code> is <code>kms</code> and can be empty to use the default key ID.</td><td>None</td></tr>
-<tr><td><code>druid.storage.sse.custom.base64EncodedKey</code></td><td>Base64-encoded key. Should be specified if <code>druid.storage.sse.type</code> is <code>custom</code>.</td><td>None</td></tr>
-<tr><td><code>druid.storage.type</code></td><td>Global deep storage provider. Must be set to <code>s3</code> to make use of this extension.</td><td>Must be set (likely <code>s3</code>).</td></tr>
 <tr><td><code>druid.storage.useS3aSchema</code></td><td>If true, use the &quot;s3a&quot; filesystem when using Hadoop-based ingestion. If false, the &quot;s3n&quot; filesystem will be used. Only affects Hadoop-based ingestion.</td><td>false</td></tr>
 </tbody>
 </table>
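+<p>For example, a minimal sketch of the deep storage settings in your common runtime properties (the bucket name and prefix are hypothetical):</p>
+<pre><code class="hljs">druid.storage.type=s3
+druid.storage.bucket=your-druid-bucket
+druid.storage.baseKey=druid/segments
+</code></pre>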
-<h3><a class="anchor" aria-hidden="true" id="s3-permissions-settings"></a><a href="#s3-permissions-settings" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 [...]
-<p><code>s3:GetObject</code> and <code>s3:PutObject</code> are basically required for pushing/loading segments to/from S3.
-If <code>druid.storage.disableAcl</code> is set to <code>false</code>, then <code>s3:GetBucketAcl</code> and <code>s3:PutObjectAcl</code> are additionally required to set ACL for objects.</p>
+<h2><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
 <h3><a class="anchor" aria-hidden="true" id="s3-authentication-methods"></a><a href="#s3-authentication-methods" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c [...]
 <p>To connect to your S3 bucket (whether deep storage bucket or source bucket), Druid uses the following credentials provider chain.</p>
 <table>
@@ -140,6 +129,40 @@ If <code>druid.storage.disableAcl</code> is set to <code>false</code>, then <cod
 <p>You can find more information about these authentication methods <a href="https://docs.aws.amazon.com/fr_fr/sdk-for-java/v1/developer-guide/credentials.html">here</a>.<br/>
 <strong>Note:</strong> <em>Order is important here, as it indicates the precedence of authentication methods.<br/>
 So if you are trying to use instance profile information, you <strong>must not</strong> set <code>druid.s3.accessKey</code> and <code>druid.s3.secretKey</code> in your Druid runtime.properties.</em></p>
+<h3><a class="anchor" aria-hidden="true" id="s3-permissions-settings"></a><a href="#s3-permissions-settings" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 [...]
+<p><code>s3:GetObject</code> and <code>s3:PutObject</code> are required for pushing segments to and loading segments from S3.
+If <code>druid.storage.disableAcl</code> is set to <code>false</code>, then <code>s3:GetBucketAcl</code> and <code>s3:PutObjectAcl</code> are additionally required to set ACLs for objects.</p>
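+<p>For example, a minimal sketch of an IAM policy granting these permissions (the bucket name is hypothetical):</p>
+<pre><code class="hljs css language-json">{
+  "Version": "2012-10-17",
+  "Statement": [{
+    "Effect": "Allow",
+    "Action": ["s3:GetObject", "s3:PutObject", "s3:GetBucketAcl", "s3:PutObjectAcl"],
+    "Resource": ["arn:aws:s3:::your-druid-bucket", "arn:aws:s3:::your-druid-bucket/*"]
+  }]
+}
+</code></pre>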
+<h3><a class="anchor" aria-hidden="true" id="aws-region"></a><a href="#aws-region" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1 [...]
+<p>The AWS SDK requires that the target region be specified. You can do this by using either the JVM system property <code>aws.region</code> or the environment variable <code>AWS_REGION</code>.</p>
+<p>As an example, to set the region to 'us-east-1' through system properties:</p>
+<ul>
+<li>Add <code>-Daws.region=us-east-1</code> to the jvm.config file for all Druid services.</li>
+<li>Add <code>-Daws.region=us-east-1</code> to <code>druid.indexer.runner.javaOpts</code> in <a href="/docs/0.17.0/configuration/index.html#middlemanager-configuration">Middle Manager configuration</a> so that the property will be passed to Peon (worker) processes.</li>
+</ul>
+<h3><a class="anchor" aria-hidden="true" id="connecting-to-s3-configuration"></a><a href="#connecting-to-s3-configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 1 [...]
+<table>
+<thead>
+<tr><th>Property</th><th>Description</th><th>Default</th></tr>
+</thead>
+<tbody>
+<tr><td><code>druid.s3.accessKey</code></td><td>S3 access key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
+<tr><td><code>druid.s3.secretKey</code></td><td>S3 secret key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
+<tr><td><code>druid.s3.fileSessionCredentials</code></td><td>Path to properties file containing <code>sessionToken</code>, <code>accessKey</code> and <code>secretKey</code> value. One key/value pair per line (format <code>key=value</code>). See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
+<tr><td><code>druid.s3.protocol</code></td><td>Communication protocol type to use when sending requests to AWS. <code>http</code> or <code>https</code> can be used. This configuration would be ignored if <code>druid.s3.endpoint.url</code> is filled with a URL with a different protocol.</td><td><code>https</code></td></tr>
+<tr><td><code>druid.s3.disableChunkedEncoding</code></td><td>Disables chunked encoding. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#disableChunkedEncoding--">AWS document</a> for details.</td><td>false</td></tr>
+<tr><td><code>druid.s3.enablePathStyleAccess</code></td><td>Enables path style access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#enablePathStyleAccess--">AWS document</a> for details.</td><td>false</td></tr>
+<tr><td><code>druid.s3.forceGlobalBucketAccessEnabled</code></td><td>Enables global bucket access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#setForceGlobalBucketAccessEnabled-java.lang.Boolean-">AWS document</a> for details.</td><td>false</td></tr>
+<tr><td><code>druid.s3.endpoint.url</code></td><td>Service endpoint either with or without the protocol.</td><td>None</td></tr>
+<tr><td><code>druid.s3.endpoint.signingRegion</code></td><td>Region to use for SigV4 signing of requests (e.g. us-west-1).</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.host</code></td><td>Proxy host to connect through.</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.port</code></td><td>Port on the proxy host to connect through.</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.username</code></td><td>User name to use when connecting through a proxy.</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.password</code></td><td>Password to use when connecting through a proxy.</td><td>None</td></tr>
+<tr><td><code>druid.storage.sse.type</code></td><td>Server-side encryption type. Should be one of <code>s3</code>, <code>kms</code>, and <code>custom</code>. See the below <a href="#server-side-encryption">Server-side encryption section</a> for more details.</td><td>None</td></tr>
+<tr><td><code>druid.storage.sse.kms.keyId</code></td><td>AWS KMS key ID. This is used only when <code>druid.storage.sse.type</code> is <code>kms</code> and can be empty to use the default key ID.</td><td>None</td></tr>
+<tr><td><code>druid.storage.sse.custom.base64EncodedKey</code></td><td>Base64-encoded key. Should be specified if <code>druid.storage.sse.type</code> is <code>custom</code>.</td><td>None</td></tr>
+</tbody>
+</table>
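+<p>For example, a minimal sketch of connection settings in your runtime properties when using static credentials (the key values are placeholders):</p>
+<pre><code class="hljs">druid.s3.accessKey=YOUR_ACCESS_KEY
+druid.s3.secretKey=YOUR_SECRET_KEY
+</code></pre>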
 <h2><a class="anchor" aria-hidden="true" id="server-side-encryption"></a><a href="#server-side-encryption" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
 <p>You can enable <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/serv-side-encryption.html">server-side encryption</a> by setting
 <code>druid.storage.sse.type</code> to a supported type of server-side encryption. The current supported types are:</p>
@@ -148,11 +171,7 @@ So if you are trying to use Instance profile information, you <strong>must not</
 <li>kms: <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingKMSEncryption.html">Server-side encryption with AWS KMS–Managed Keys</a></li>
 <li>custom: <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html">Server-side encryption with Customer-Provided Encryption Keys</a></li>
 </ul>
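+<p>For example, a sketch of the properties to enable server-side encryption with an AWS KMS–managed key (the key ID is hypothetical):</p>
+<pre><code class="hljs">druid.storage.sse.type=kms
+druid.storage.sse.kms.keyId=your-kms-key-id
+</code></pre>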
-<h2><a class="anchor" aria-hidden="true" id="reading-data-from-s3"></a><a href="#reading-data-from-s3" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
-<p>The <a href="/docs/0.17.0/ingestion/native-batch.html#s3-input-source">S3 input source</a> is supported by the <a href="/docs/0.17.0/ingestion/native-batch.html#parallel-task">Parallel task</a>
-to read objects directly from S3. If you use the <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop task</a>,
-you can read data from S3 by specifying the S3 paths in your <a href="/docs/0.17.0/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/protobuf.html"><span class="arrow-prev">← </span><span>Protobuf</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/simple-client-sslcontext.html"><span>Simple SSLContext Provider Module</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Dee [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/development/extensions-core/protobuf.html"><span class="arrow-prev">← </span><span>Protobuf</span></a><a class="docs-next button" href="/docs/0.17.0/development/extensions-core/simple-client-sslcontext.html"><span>Simple SSLContext Provider Module</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#s3-extension">S3  [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/0.17.0/development/modules.html b/docs/0.17.0/development/modules.html
index fc6c0c8..d6c2643 100644
--- a/docs/0.17.0/development/modules.html
+++ b/docs/0.17.0/development/modules.html
@@ -189,9 +189,9 @@ in <a href="/docs/0.17.0/ingestion/native-batch.html">native parallel indexing</
 }
 </code></pre>
 <p>This is registering the InputSource with Jackson's polymorphic serialization/deserialization layer.  More concretely, having this will mean that if you specify a <code>&quot;inputSource&quot;: { &quot;type&quot;: &quot;s3&quot;, ... }</code> in your IO config, then the system will load this InputSource for your <code>InputSource</code> implementation.</p>
-<p>Note that inside of Druid, we have made the @JacksonInject annotation for Jackson deserialized objects actually use the base Guice injector to resolve the object to be injected.  So, if your InputSource needs access to some object, you can add a @JacksonInject annotation on a setter and it will get set on instantiation.</p>
+<p>Note that inside Druid, the <code>@JacksonInject</code> annotation on Jackson-deserialized objects actually uses the base Guice injector to resolve the object to be injected. So, if your InputSource needs access to some object, you can add a <code>@JacksonInject</code> annotation on a setter and it will be set on instantiation.</p>
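+<p>As a minimal sketch (the class name and the injected type are hypothetical, and the remaining <code>InputSource</code> methods are omitted):</p>
+<pre><code class="hljs css language-java">public class MyInputSource implements InputSource
+{
+  private ObjectMapper jsonMapper;
+
+  // Resolved from the base Guice injector when Jackson deserializes this InputSource
+  @JacksonInject
+  public void setJsonMapper(ObjectMapper jsonMapper)
+  {
+    this.jsonMapper = jsonMapper;
+  }
+
+  // ... InputSource methods omitted for brevity
+}
+</code></pre>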
 <h3><a class="anchor" aria-hidden="true" id="adding-support-for-a-new-data-format"></a><a href="#adding-support-for-a-new-data-format" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 [...]
-<p>Adding support for a new data format requires to implement two interfaces, i.e., <code>InputFormat</code> and <code>InputEntityReader</code>.
+<p>Adding support for a new data format requires implementing two interfaces, i.e., <code>InputFormat</code> and <code>InputEntityReader</code>.
 <code>InputFormat</code> defines how your data is formatted, while <code>InputEntityReader</code> defines how to parse your data and convert it into Druid <code>InputRow</code>s.</p>
 <p>There is an example in the <code>druid-orc-extensions</code> module with the <code>OrcInputFormat</code> and <code>OrcReader</code>.</p>
 <p>Adding an InputFormat is very similar to adding an InputSource. They operate purely through Jackson and thus should just be additions to the Jackson modules returned by your DruidModule.</p>
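+<p>A minimal sketch of such an addition in your <code>DruidModule</code> (the module and type names are hypothetical):</p>
+<pre><code class="hljs css language-java">@Override
+public List&lt;? extends Module&gt; getJacksonModules()
+{
+  // Registers "myFormat" as the JSON type name for MyInputFormat
+  return Collections.singletonList(
+      new SimpleModule("MyInputFormatModule")
+          .registerSubtypes(new NamedType(MyInputFormat.class, "myFormat"))
+  );
+}
+</code></pre>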
diff --git a/docs/0.17.0/ingestion/data-formats.html b/docs/0.17.0/ingestion/data-formats.html
index 7e50db7..88b7d65 100644
--- a/docs/0.17.0/ingestion/data-formats.html
+++ b/docs/0.17.0/ingestion/data-formats.html
@@ -114,7 +114,7 @@ parsing data will not be as efficient as writing a native Java parser or using a
 <blockquote>
 <p>The Input Format is a new way, introduced in 0.17.0, to specify the data format of your input data.
 Unfortunately, the Input Format doesn't support all data formats or ingestion methods supported by Druid yet.
-Especially if you want to use the Hadoop ingestion, you still need to use the <a href="#parser-deprecated">Parser</a>.
+In particular, if you want to use Hadoop ingestion, you still need to use the <a href="#parser">Parser</a>.
 If your data is formatted in some format not listed in this section, please consider using the Parser instead.</p>
 </blockquote>
 <p>All forms of Druid ingestion require some form of schema object. The format of the data to be ingested is specified using the <code>inputFormat</code> entry in your <a href="/docs/0.17.0/ingestion/index.html#ioconfig"><code>ioConfig</code></a>.</p>
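+<p>For example, a sketch of the <code>inputFormat</code> entry for delimited text data (the column names are hypothetical):</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  ...
+  "inputFormat": {
+    "type": "csv",
+    "columns": ["timestamp", "page", "count"]
+  },
+  ...
+}
+</code></pre>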
@@ -310,19 +310,17 @@ and <code>parquet</code>.</p>
 <li><a href="http://jsonpath.herokuapp.com/">http://jsonpath.herokuapp.com/</a> is useful for testing <code>path</code>-type expressions.</li>
 <li>jackson-jq supports a subset of the full <a href="https://stedolan.github.io/jq/">jq</a> syntax.  Please refer to the <a href="https://github.com/eiiches/jackson-jq">jackson-jq documentation</a> for details.</li>
 </ul>
-<h2><a class="anchor" aria-hidden="true" id="parser-deprecated"></a><a href="#parser-deprecated" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2- [...]
+<h2><a class="anchor" aria-hidden="true" id="parser"></a><a href="#parser" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2. [...]
 <blockquote>
-<p>Even though the Parser is deprecated, it is still useful especially
-for particular ingestion methods or data formats. For example, you
-should use the Parser with Hadoop ingestion since the <a href="#input-format"><code>inputFormat</code></a>
-is not supported yet with Hadoop ingestion.
-Some data formats such as Protocol Buffer or Avro are not supported by the <code>inputFormat</code> yet as well.</p>
+<p>The Parser is deprecated for <a href="/docs/0.17.0/ingestion/native-batch.html">native batch tasks</a>, <a href="/docs/0.17.0/development/extensions-core/kafka-ingestion.html">Kafka indexing service</a>,
+and <a href="/docs/0.17.0/development/extensions-core/kinesis-ingestion.html">Kinesis indexing service</a>.
+Consider using the <a href="#input-format">input format</a> instead for these types of ingestion.</p>
 </blockquote>
 <p>This section lists all default and core extension parsers.
 For community extension parsers, please see our <a href="../development/extensions.html#community-extensions">community extensions list</a>.</p>
 <h3><a class="anchor" aria-hidden="true" id="string-parser"></a><a href="#string-parser" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
 <p><code>string</code> typed parsers operate on text-based inputs that can be split into individual records by newlines.
-Each line can be further parsed using <a href="#parsespec-deprecated"><code>parseSpec</code></a>.</p>
+Each line can be further parsed using <a href="#parsespec"><code>parseSpec</code></a>.</p>
 <table>
 <thead>
 <tr><th>Field</th><th>Type</th><th>Description</th><th>Required</th></tr>
@@ -1027,12 +1025,11 @@ Details can be found in Schema Registry <a href="http://docs.confluent.io/curren
 </code></pre>
 <p>See the <a href="/docs/0.17.0/development/extensions-core/protobuf.html">extension description</a> for
 more details and examples.</p>
-<h2><a class="anchor" aria-hidden="true" id="parsespec-deprecated"></a><a href="#parsespec-deprecated" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
+<h2><a class="anchor" aria-hidden="true" id="parsespec"></a><a href="#parsespec" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.6 [...]
 <blockquote>
-<p>Even though the ParseSpec is deprecated, it is still useful especially
-for particular ingestion methods or data formats. For example, you
-should use the ParseSpec with Hadoop ingestion since the <a href="#input-format"><code>inputFormat</code></a>
-is not supported yet with Hadoop ingestion.</p>
+<p>The ParseSpec is deprecated for <a href="/docs/0.17.0/ingestion/native-batch.html">native batch tasks</a>, <a href="/docs/0.17.0/development/extensions-core/kafka-ingestion.html">Kafka indexing service</a>,
+and <a href="/docs/0.17.0/development/extensions-core/kinesis-ingestion.html">Kinesis indexing service</a>.
+Consider using the <a href="#input-format">input format</a> instead for these types of ingestion.</p>
 </blockquote>
 <p>ParseSpecs serve two purposes:</p>
 <ul>
@@ -1231,7 +1228,7 @@ handle all formatting decisions on their own, without using the ParseSpec.</p>
 <tr><td>flattenSpec</td><td>JSON Object</td><td>Specifies flattening configuration for nested JSON data. See <a href="#flattenspec"><code>flattenSpec</code></a> for more info.</td><td>no</td></tr>
 </tbody>
 </table>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/ingestion/index.html"><span class="arrow-prev">← </span><span>Ingestion</span></a><a class="docs-next button" href="/docs/0.17.0/ingestion/schema-design.html"><span>Schema design tips</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#formatting-the-data">Formatting the Data</a></li><li><a href="#custom-formats">Cu [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/ingestion/index.html"><span class="arrow-prev">← </span><span>Ingestion</span></a><a class="docs-next button" href="/docs/0.17.0/ingestion/schema-design.html"><span>Schema design tips</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#formatting-the-data">Formatting the Data</a></li><li><a href="#custom-formats">Cu [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/0.17.0/ingestion/hadoop.html b/docs/0.17.0/ingestion/hadoop.html
index b81d0bd..8f44e58 100644
--- a/docs/0.17.0/ingestion/hadoop.html
+++ b/docs/0.17.0/ingestion/hadoop.html
@@ -194,11 +194,38 @@ what it should contain.</p>
 <p>For example, using the static input paths:</p>
 <pre><code class="hljs"><span class="hljs-string">"paths"</span> : "<span class="hljs-type">hdfs</span>://path/to/data/<span class="hljs-keyword">is</span>/here/data.gz,hdfs://path/to/data/<span class="hljs-keyword">is</span>/here/moredata.gz,hdfs://path/to/data/<span class="hljs-keyword">is</span>/here/evenmoredata.gz<span class="hljs-string">"
 </span></code></pre>
-<p>You can also read from cloud storage such as AWS S3 or Google Cloud Storage.</p>
+<p>You can also read from cloud storage such as AWS S3 or Google Cloud Storage.
+To do so, you need to install the necessary library on Druid's classpath for <em>all MiddleManager or Indexer processes</em>.
+For S3, you can run the command below to install the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a>.</p>
+<pre><code class="hljs css language-bash">java -classpath <span class="hljs-string">"<span class="hljs-variable">${DRUID_HOME}</span>/lib/*"</span> org.apache.druid.cli.Main tools pull-deps -h <span class="hljs-string">"org.apache.hadoop:hadoop-aws:<span class="hljs-variable">${HADOOP_VERSION}</span>"</span>;
+cp <span class="hljs-variable">${DRUID_HOME}</span>/hadoop-dependencies/hadoop-aws/<span class="hljs-variable">${HADOOP_VERSION}</span>/hadoop-aws-<span class="hljs-variable">${HADOOP_VERSION}</span>.jar <span class="hljs-variable">${DRUID_HOME}</span>/extensions/druid-hdfs-storage/
+</code></pre>
+<p>Once you install the Hadoop AWS module in all MiddleManager and Indexer processes, you can specify
+your S3 paths in the inputSpec along with the job properties below.
+For more configuration options, see the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a> documentation.</p>
 <pre><code class="hljs"><span class="hljs-string">"paths"</span> : "<span class="hljs-type">s3a</span>://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/data.gz,s3a://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/moredata.gz,s3a://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/evenmoredata.gz<span class="hljs-string">"
 </span></code></pre>
+<pre><code class="hljs css language-json">"jobProperties" : {
+  "fs.s3a.impl" : "org.apache.hadoop.fs.s3a.S3AFileSystem",
+  "fs.AbstractFileSystem.s3a.impl" : "org.apache.hadoop.fs.s3a.S3A",
+  "fs.s3a.access.key" : "YOUR_ACCESS_KEY",
+  "fs.s3a.secret.key" : "YOUR_SECRET_KEY"
+}
+</code></pre>
+<p>For Google Cloud Storage, you need to install the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md">GCS connector jar</a>
+under <code>${DRUID_HOME}/hadoop-dependencies</code> in <em>all MiddleManager or Indexer processes</em>.
+Once you install the GCS connector jar in all MiddleManager and Indexer processes, you can specify
+your Google Cloud Storage paths in the inputSpec along with the job properties below.
+For more configuration options, see the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md#configure-hadoop">instructions to configure Hadoop</a>,
+<a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/conf/gcs-core-default.xml">GCS core default</a>
+and <a href="https://github.com/GoogleCloudPlatform/bdutil/blob/master/conf/hadoop2/gcs-core-template.xml">GCS core template</a>.</p>
 <pre><code class="hljs"><span class="hljs-string">"paths"</span> : "<span class="hljs-type">gs</span>://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/data.gz,gs://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/moredata.gz,gs://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/evenmoredata.gz<span class="hljs-string">"
 </span></code></pre>
+<pre><code class="hljs css language-json">"jobProperties" : {
+  "fs.gs.impl" : "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem",
+  "fs.AbstractFileSystem.gs.impl" : "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS"
+}
+</code></pre>
 <h4><a class="anchor" aria-hidden="true" id="granularity"></a><a href="#granularity" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42 [...]
 <p>A type of inputSpec that expects data to be organized in directories according to datetime using the path format: <code>y=XXXX/m=XX/d=XX/H=XX/M=XX/S=XX</code> (where date is represented by lowercase and time is represented by uppercase).</p>
 <table>
diff --git a/docs/0.17.0/ingestion/index.html b/docs/0.17.0/ingestion/index.html
index dff58c3..9e5b074 100644
--- a/docs/0.17.0/ingestion/index.html
+++ b/docs/0.17.0/ingestion/index.html
@@ -306,9 +306,9 @@ documentation for each <a href="#ingestion-methods">ingestion method</a>.</li>
 <li><a href="#tuningconfig"><code>tuningConfig</code></a>, which controls various tuning parameters specific to each
 <a href="#ingestion-methods">ingestion method</a>.</li>
 </ul>
-<p>Example ingestion spec for task type <code>parallel_index</code> (native batch):</p>
+<p>Example ingestion spec for task type <code>index_parallel</code> (native batch):</p>
 <pre><code class="hljs">{
-  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"parallel_index"</span>,
+  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span>: {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"wikipedia"</span>,
@@ -337,7 +337,7 @@ documentation for each <a href="#ingestion-methods">ingestion method</a>.</li>
       }
     },
     <span class="hljs-attr">"ioConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"parallel_index"</span>,
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"inputSource"</span>: {
         <span class="hljs-attr">"type"</span>: <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span>: <span class="hljs-string">"examples/indexing/"</span>,
@@ -354,7 +354,7 @@ documentation for each <a href="#ingestion-methods">ingestion method</a>.</li>
       }
     },
     <span class="hljs-attr">"tuningConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"parallel_index"</span>
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>
     }
   }
 }
diff --git a/docs/0.17.0/ingestion/native-batch.html b/docs/0.17.0/ingestion/native-batch.html
index 2c02732..753f776 100644
--- a/docs/0.17.0/ingestion/native-batch.html
+++ b/docs/0.17.0/ingestion/native-batch.html
@@ -358,6 +358,10 @@ falling in the same time chunk and the same hash value from multiple MiddleManag
 them to create the final segments. Finally, they push the final segments to the deep storage at once.</li>
 </ul>
 <h4><a class="anchor" aria-hidden="true" id="single-dimension-range-partitioning"></a><a href="#single-dimension-range-partitioning" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2 [...]
+<blockquote>
+<p>Single dimension range partitioning is currently not supported in the sequential mode of the Parallel task.
+Set <code>maxNumConcurrentSubTasks</code> to a value larger than 1 to use this partitioning, as in the sketch below.</p>
+</blockquote>
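+<p>For example, a sketch of a <code>tuningConfig</code> that uses this partitioning (the partition dimension and target are hypothetical):</p>
+<pre><code class="hljs css language-json">"tuningConfig": {
+  "type": "index_parallel",
+  "partitionsSpec": {
+    "type": "single_dim",
+    "partitionDimension": "page",
+    "targetRowsPerSegment": 5000000
+  },
+  "maxNumConcurrentSubTasks": 2
+}
+</code></pre>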
 <table>
 <thead>
 <tr><th>property</th><th>description</th><th>default</th><th>required?</th></tr>
@@ -1037,6 +1041,9 @@ where each worker task of <code>index_parallel</code> will read a single file.</
 <tr><td>paths</td><td>HDFS paths. Can be either a JSON array or comma-separated string of paths. Wildcards like <code>*</code> are supported in these paths.</td><td>None</td><td>yes</td></tr>
 </tbody>
 </table>
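+<p>For example, a sketch of an HDFS input source (the namenode address and paths are hypothetical):</p>
+<pre><code class="hljs css language-json">"inputSource": {
+  "type": "hdfs",
+  "paths": "hdfs://namenode:8020/path/to/data/*"
+}
+</code></pre>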
+<p>You can also ingest from cloud storage using the HDFS input source.
+However, if you want to read from AWS S3 or Google Cloud Storage, consider using
+the <a href="#s3-input-source">S3 input source</a> or the <a href="#google-cloud-storage-input-source">Google Cloud Storage input source</a> instead.</p>
 <h3><a class="anchor" aria-hidden="true" id="http-input-source"></a><a href="#http-input-source" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2- [...]
 <p>The HTTP input source supports reading files directly
 from remote sites via HTTP.
diff --git a/docs/0.17.0/operations/other-hadoop.html b/docs/0.17.0/operations/other-hadoop.html
index 416a541..3e1f0f4 100644
--- a/docs/0.17.0/operations/other-hadoop.html
+++ b/docs/0.17.0/operations/other-hadoop.html
@@ -101,7 +101,7 @@ there is a conflict.</p>
 <p>These properties can be set in either one of the following ways:</p>
 <ul>
 <li>Using the task definition, e.g. add <code>&quot;mapreduce.job.classloader&quot;: &quot;true&quot;</code> to the <code>jobProperties</code> of the <code>tuningConfig</code> of your indexing task (see the <a href="/docs/0.17.0/ingestion/hadoop.html">Hadoop batch ingestion documentation</a>).</li>
-<li>Using system properties, e.g. on the MiddleManager set <code>druid.indexer.runner.javaOpts=... -Dhadoop.mapreduce.job.classloader=true</code>.</li>
+<li>Using system properties, e.g. set <code>druid.indexer.runner.javaOpts=... -Dhadoop.mapreduce.job.classloader=true</code> in <a href="/docs/0.17.0/configuration/index.html#middlemanager-configuration">Middle Manager configuration</a>.</li>
 </ul>
 <h3><a class="anchor" aria-hidden="true" id="overriding-specific-classes"></a><a href="#overriding-specific-classes" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 1 [...]
 <p>When <code>mapreduce.job.classloader = true</code>, it is also possible to specifically define which classes should be loaded from the hadoop system classpath and which should be loaded from job-supplied JARs.</p>
diff --git a/docs/0.17.0/querying/aggregations.html b/docs/0.17.0/querying/aggregations.html
index 9b8048f..0ddd5cb 100644
--- a/docs/0.17.0/querying/aggregations.html
+++ b/docs/0.17.0/querying/aggregations.html
@@ -136,7 +136,7 @@ query time.</p>
 <p>(Double/Float/Long) First and Last aggregators cannot be used in an ingestion spec, and should only be specified as part of queries.</p>
 <p>Note that queries with first/last aggregators on a segment created with rollup enabled will return the rolled up value, and not the last value within the raw ingested data.</p>
 <h4><a class="anchor" aria-hidden="true" id="doublefirst-aggregator"></a><a href="#doublefirst-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
-<p><code>doubleFirst</code> computes the metric value with the minimum timestamp or 0 if no row exist</p>
+<p><code>doubleFirst</code> computes the metric value with the minimum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"doubleFirst"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -144,7 +144,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="doublelast-aggregator"></a><a href="#doublelast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
-<p><code>doubleLast</code> computes the metric value with the maximum timestamp or 0 if no row exist</p>
+<p><code>doubleLast</code> computes the metric value with the maximum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"doubleLast"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -152,7 +152,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="floatfirst-aggregator"></a><a href="#floatfirst-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
-<p><code>floatFirst</code> computes the metric value with the minimum timestamp or 0 if no row exist</p>
+<p><code>floatFirst</code> computes the metric value with the minimum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"floatFirst"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -160,7 +160,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="floatlast-aggregator"></a><a href="#floatlast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
-<p><code>floatLast</code> computes the metric value with the maximum timestamp or 0 if no row exist</p>
+<p><code>floatLast</code> computes the metric value with the maximum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"floatLast"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -168,7 +168,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="longfirst-aggregator"></a><a href="#longfirst-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
-<p><code>longFirst</code> computes the metric value with the minimum timestamp or 0 if no row exist</p>
+<p><code>longFirst</code> computes the metric value with the minimum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"longFirst"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -176,7 +176,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="longlast-aggregator"></a><a href="#longlast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.2 [...]
-<p><code>longLast</code> computes the metric value with the maximum timestamp or 0 if no row exist</p>
+<p><code>longLast</code> computes the metric value with the maximum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"longLast"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -189,8 +189,7 @@ query time.</p>
   "type" : "stringFirst",
   "name" : &lt;output_name&gt;,
   "fieldName" : &lt;metric_name&gt;,
-  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024),
-  "filterNullValues" : &lt;boolean&gt; # (optional, defaults to false)
+  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024)
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="stringlast-aggregator"></a><a href="#stringlast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
@@ -199,8 +198,7 @@ query time.</p>
   "type" : "stringLast",
   "name" : &lt;output_name&gt;,
   "fieldName" : &lt;metric_name&gt;,
-  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024),
-  "filterNullValues" : &lt;boolean&gt; # (optional, defaults to false)
+  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024)
 }
 </code></pre>
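 <p>As a concrete sketch (field names hypothetical), a <code>stringLast</code> aggregator that caps stored strings at the default 1024 bytes:</p>
 <pre><code class="hljs css language-json">{
   "type" : "stringLast",
   "name" : "lastComment",
   "fieldName" : "comment",
   "maxStringBytes" : 1024
 }
 </code></pre>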
 <h3><a class="anchor" aria-hidden="true" id="javascript-aggregator"></a><a href="#javascript-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
diff --git a/docs/0.17.0/querying/sql.html b/docs/0.17.0/querying/sql.html
index ea85ead..994a02f 100644
--- a/docs/0.17.0/querying/sql.html
+++ b/docs/0.17.0/querying/sql.html
@@ -231,9 +231,9 @@ possible for two aggregators in the same SQL query to have different filters.</p
 <tr><td><code>STDDEV_POP(expr)</code></td><td>Computes standard deviation population of <code>expr</code>. See <a href="../development/extensions-core/stats.html">stats extension</a> documentation for additional details.</td></tr>
 <tr><td><code>STDDEV_SAMP(expr)</code></td><td>Computes standard deviation sample of <code>expr</code>. See <a href="../development/extensions-core/stats.html">stats extension</a> documentation for additional details.</td></tr>
 <tr><td><code>STDDEV(expr)</code></td><td>Computes standard deviation sample of <code>expr</code>. See <a href="../development/extensions-core/stats.html">stats extension</a> documentation for additional details.</td></tr>
-<tr><td><code>EARLIEST(expr)</code></td><td>Returns the earliest non-null value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;earliest&quot; is the value first encountered with the minimum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the first value encountered.</td></tr>
+<tr><td><code>EARLIEST(expr)</code></td><td>Returns the earliest value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;earliest&quot; is the value first encountered with the minimum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the first value encountered.</td></tr>
 <tr><td><code>EARLIEST(expr, maxBytesPerString)</code></td><td>Like <code>EARLIEST(expr)</code>, but for strings. The <code>maxBytesPerString</code> parameter determines how much aggregation space to allocate per string. Strings longer than this limit will be truncated. This parameter should be set as low as possible, since high values will lead to wasted memory.</td></tr>
-<tr><td><code>LATEST(expr)</code></td><td>Returns the latest non-null value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;latest&quot; is the value last encountered with the maximum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the last value encountered.</td></tr>
+<tr><td><code>LATEST(expr)</code></td><td>Returns the latest value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;latest&quot; is the value last encountered with the maximum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the last value encountered.</td></tr>
 <tr><td><code>LATEST(expr, maxBytesPerString)</code></td><td>Like <code>LATEST(expr)</code>, but for strings. The <code>maxBytesPerString</code> parameter determines how much aggregation space to allocate per string. Strings longer than this limit will be truncated. This parameter should be set as low as possible, since high values will lead to wasted memory.</td></tr>
 </tbody>
 </table>
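 <p>For illustration, a hedged sketch of <code>LATEST</code> in use, submitted as a JSON payload to the SQL API (the datasource and column names assume the wikipedia tutorial data):</p>
 <pre><code class="hljs css language-json">{
   "query" : "SELECT channel, LATEST(added) AS latest_added FROM wikipedia GROUP BY channel"
 }
 </code></pre>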
diff --git a/docs/0.17.0/tutorials/tutorial-batch.html b/docs/0.17.0/tutorials/tutorial-batch.html
index 1d8bd74..a6005d4 100644
--- a/docs/0.17.0/tutorials/tutorial-batch.html
+++ b/docs/0.17.0/tutorials/tutorial-batch.html
@@ -142,42 +142,36 @@ At this point, you can go to the <code>Query</code> view to run SQL queries agai
 <p>The Druid package includes the following sample native batch ingestion task spec at <code>quickstart/tutorial/wikipedia-index.json</code>, shown here for convenience,
 which has been configured to read the <code>quickstart/tutorial/wikiticker-2015-09-12-sampled.json.gz</code> input file:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"wikipedia"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span> : [
-              <span class="hljs-string">"channel"</span>,
-              <span class="hljs-string">"cityName"</span>,
-              <span class="hljs-string">"comment"</span>,
-              <span class="hljs-string">"countryIsoCode"</span>,
-              <span class="hljs-string">"countryName"</span>,
-              <span class="hljs-string">"isAnonymous"</span>,
-              <span class="hljs-string">"isMinor"</span>,
-              <span class="hljs-string">"isNew"</span>,
-              <span class="hljs-string">"isRobot"</span>,
-              <span class="hljs-string">"isUnpatrolled"</span>,
-              <span class="hljs-string">"metroCode"</span>,
-              <span class="hljs-string">"namespace"</span>,
-              <span class="hljs-string">"page"</span>,
-              <span class="hljs-string">"regionIsoCode"</span>,
-              <span class="hljs-string">"regionName"</span>,
-              <span class="hljs-string">"user"</span>,
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
-            ]
-          },
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
-          }
-        }
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span> : [
+          <span class="hljs-string">"channel"</span>,
+          <span class="hljs-string">"cityName"</span>,
+          <span class="hljs-string">"comment"</span>,
+          <span class="hljs-string">"countryIsoCode"</span>,
+          <span class="hljs-string">"countryName"</span>,
+          <span class="hljs-string">"isAnonymous"</span>,
+          <span class="hljs-string">"isMinor"</span>,
+          <span class="hljs-string">"isNew"</span>,
+          <span class="hljs-string">"isRobot"</span>,
+          <span class="hljs-string">"isUnpatrolled"</span>,
+          <span class="hljs-string">"metroCode"</span>,
+          <span class="hljs-string">"namespace"</span>,
+          <span class="hljs-string">"page"</span>,
+          <span class="hljs-string">"regionIsoCode"</span>,
+          <span class="hljs-string">"regionName"</span>,
+          <span class="hljs-string">"user"</span>,
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
+        ]
+      },
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
       },
       <span class="hljs-attr">"metricsSpec"</span> : [],
       <span class="hljs-attr">"granularitySpec"</span> : {
@@ -189,16 +183,19 @@ which has been configured to read the <code>quickstart/tutorial/wikiticker-2015-
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/tutorial/"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"wikiticker-2015-09-12-sampled.json.gz"</span>
       },
+      <span class="hljs-attr">"inputFormat"</span> :  {
+        <span class="hljs-attr">"type"</span>: <span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"appendToExisting"</span> : <span class="hljs-literal">false</span>
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
       <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
     }
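 <p>Once the task completes, a sketch of a sanity-check query against the loaded datasource, submitted as JSON to the SQL endpoint (column names taken from the spec above):</p>
 <pre><code class="hljs css language-json">{
   "query" : "SELECT page, SUM(added) AS total_added FROM wikipedia GROUP BY page ORDER BY total_added DESC LIMIT 5"
 }
 </code></pre>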
diff --git a/docs/0.17.0/tutorials/tutorial-compaction.html b/docs/0.17.0/tutorials/tutorial-compaction.html
index 0c650a4..04a63c0 100644
--- a/docs/0.17.0/tutorials/tutorial-compaction.html
+++ b/docs/0.17.0/tutorials/tutorial-compaction.html
@@ -114,7 +114,7 @@ Retrieved 1 row <span class="hljs-keyword">in</span> 1.38s.
   <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"compaction-tutorial"</span>,
   <span class="hljs-attr">"interval"</span>: <span class="hljs-string">"2015-09-12/2015-09-13"</span>,
   <span class="hljs-attr">"tuningConfig"</span> : {
-    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
     <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
     <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
   }
@@ -153,7 +153,7 @@ Retrieved 1 row <span class="hljs-keyword">in</span> 1.30s.
   <span class="hljs-attr">"interval"</span>: <span class="hljs-string">"2015-09-12/2015-09-13"</span>,
   <span class="hljs-attr">"segmentGranularity"</span>: <span class="hljs-string">"DAY"</span>,
   <span class="hljs-attr">"tuningConfig"</span> : {
-    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
     <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
     <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>,
     <span class="hljs-attr">"forceExtendableShardSpecs"</span> : <span class="hljs-literal">true</span>
diff --git a/docs/0.17.0/tutorials/tutorial-ingestion-spec.html b/docs/0.17.0/tutorials/tutorial-ingestion-spec.html
index cfbd63f..496bde2 100644
--- a/docs/0.17.0/tutorials/tutorial-ingestion-spec.html
+++ b/docs/0.17.0/tutorials/tutorial-ingestion-spec.html
@@ -120,33 +120,14 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 }
 </code></pre>
 <p>Let's call the tutorial datasource <code>ingestion-tutorial</code>.</p>
-<h3><a class="anchor" aria-hidden="true" id="choose-a-parser"></a><a href="#choose-a-parser" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5  [...]
-<p>A <code>dataSchema</code> has a <code>parser</code> field, which defines the parser that Druid will use to interpret the input data.</p>
-<p>Since our input data is represented as JSON strings, we'll use a <code>string</code> parser with <code>json</code> format:</p>
-<pre><code class="hljs css language-json">"dataSchema" : {
-  "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json"
-    }
-  }
-}
-</code></pre>
 <h3><a class="anchor" aria-hidden="true" id="time-column"></a><a href="#time-column" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42 [...]
-<p>The <code>parser</code> needs to know how to extract the main timestamp field from the input data. When using a <code>json</code> type <code>parseSpec</code>, the timestamp is defined in a <code>timestampSpec</code>.</p>
-<p>The timestamp column in our input data is named &quot;ts&quot;, containing ISO 8601 timestamps, so let's add a <code>timestampSpec</code> with that information to the <code>parseSpec</code>:</p>
+<p>The <code>dataSchema</code> needs to know how to extract the main timestamp field from the input data.</p>
+<p>The timestamp column in our input data is named &quot;ts&quot;, containing ISO 8601 timestamps, so let's add a <code>timestampSpec</code> with that information to the <code>dataSchema</code>:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
   }
 }
 </code></pre>
@@ -161,24 +142,16 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <li><p>If rollup is disabled, then all columns are treated as &quot;dimensions&quot; and no pre-aggregation occurs.</p></li>
 </ul>
 <p>For this tutorial, let's enable rollup. This is specified with a <code>granularitySpec</code> on the <code>dataSchema</code>.</p>
-<p>Note that the <code>granularitySpec</code> lies outside of the <code>parser</code>. We will revisit the <code>parser</code> soon when we define our dimensions and metrics.</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
   },
   "granularitySpec" : {
     "rollup" : true
   }
 }
-
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="choosing-dimensions-and-metrics"></a><a href="#choosing-dimensions-and-metrics" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 [...]
 <p>For this example dataset, the following is a sensible split for &quot;dimensions&quot; and &quot;metrics&quot;:</p>
@@ -189,27 +162,21 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>The dimensions here are a group of properties that identify a unidirectional flow of IP traffic, while the metrics represent facts about the IP traffic flow specified by a dimension grouping.</p>
 <p>Let's look at how to define these dimensions and metrics within the ingestion spec.</p>
 <h4><a class="anchor" aria-hidden="true" id="dimensions"></a><a href="#dimensions" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1 [...]
-<p>Dimensions are specified with a <code>dimensionsSpec</code> inside the <code>parseSpec</code>.</p>
+<p>Dimensions are specified with a <code>dimensionsSpec</code> inside the <code>dataSchema</code>.</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "granularitySpec" : {
     "rollup" : true
@@ -230,24 +197,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>Metrics are specified with a <code>metricsSpec</code> inside the <code>dataSchema</code>:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -291,24 +252,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>Segment granularity is configured by the <code>segmentGranularity</code> property in the <code>granularitySpec</code>. For this tutorial, we'll create hourly segments:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -328,24 +283,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>The query granularity is configured by the <code>queryGranularity</code> property in the <code>granularitySpec</code>. For this tutorial, let's use minute granularity:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -356,7 +305,7 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
   "granularitySpec" : {
     "type" : "uniform",
     "segmentGranularity" : "HOUR",
-    "queryGranularity" : "MINUTE"
+    "queryGranularity" : "MINUTE",
     "rollup" : true
   }
 }
@@ -372,24 +321,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>The interval is also specified in the <code>granularitySpec</code>:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -410,28 +353,22 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>We've now finished defining our <code>dataSchema</code>. The remaining steps are to place the <code>dataSchema</code> we created into an ingestion task spec, and specify the input source.</p>
 <p>The <code>dataSchema</code> is shared across all task types, but each task type has its own specification format. For this tutorial, we will use the native batch ingestion task:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"ingestion-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span> : {
-            <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
-            <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"srcIP"</span>,
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span> : {
+        <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
+        <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"srcIP"</span>,
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -451,39 +388,47 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 }
 </code></pre>
 <h2><a class="anchor" aria-hidden="true" id="define-the-input-source"></a><a href="#define-the-input-source" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 [...]
-<p>Now let's define our input source, which is specified in an <code>ioConfig</code> object. Each task type has its own type of <code>ioConfig</code>. The native batch task uses &quot;firehoses&quot; to read input data, so let's configure a &quot;local&quot; firehose to read the example netflow data we saved earlier:</p>
+<p>Now let's define our input source, which is specified in an <code>ioConfig</code> object. Each task type has its own type of <code>ioConfig</code>. To read input data, we need to specify an <code>inputSource</code>. The example netflow data we saved earlier is read from a local file, configured as follows:</p>
 <pre><code class="hljs css language-json">    "ioConfig" : {
-      "type" : "index",
-      "firehose" : {
+      "type" : "index_parallel",
+      "inputSource" : {
         "type" : "local",
         "baseDir" : "quickstart/",
         "filter" : "ingestion-tutorial-data.json"
       }
     }
 </code></pre>
+<h3><a class="anchor" aria-hidden="true" id="define-the-format-of-the-data"></a><a href="#define-the-format-of-the-data" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12  [...]
+<p>Since our input data is represented as JSON strings, we'll use an <code>inputFormat</code> of type <code>json</code>:</p>
+<pre><code class="hljs css language-json">    "ioConfig" : {
+      "type" : "index_parallel",
+      "inputSource" : {
+        "type" : "local",
+        "baseDir" : "quickstart/",
+        "filter" : "ingestion-tutorial-data.json"
+      },
+      "inputFormat" : {
+        "type" : "json"
+      }
+    }
+</code></pre>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"ingestion-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span> : {
-            <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
-            <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"srcIP"</span>,
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span> : {
+        <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
+        <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"srcIP"</span>,
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -500,11 +445,14 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"ingestion-tutorial-data.json"</span>
+      },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"json"</span>
       }
     }
   }
@@ -514,7 +462,7 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>Each ingestion task has a <code>tuningConfig</code> section that allows users to tune various ingestion parameters.</p>
 <p>As an example, let's add a <code>tuningConfig</code> that sets a target segment size for the native batch ingestion task:</p>
 <pre><code class="hljs css language-json">    "tuningConfig" : {
-      "type" : "index",
+      "type" : "index_parallel",
       "maxRowsPerSegment" : 5000000
     }
 </code></pre>
@@ -522,28 +470,22 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <h2><a class="anchor" aria-hidden="true" id="final-spec"></a><a href="#final-spec" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1 [...]
 <p>We've finished defining the ingestion spec; it should now look like the following:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"ingestion-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span> : {
-            <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
-            <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"srcIP"</span>,
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span> : {
+        <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
+        <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"srcIP"</span>,
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -560,15 +502,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"ingestion-tutorial-data.json"</span>
+      },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"json"</span>
       }
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>
     }
   }
@@ -598,7 +543,7 @@ Retrieved 5 rows <span class="hljs-keyword">in</span> 0.12s.
 
 dsql&gt;
 </code></pre>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/tutorials/tutorial-delete-data.html"><span class="arrow-prev">← </span><span>Deleting data</span></a><a class="docs-next button" href="/docs/0.17.0/tutorials/tutorial-transform-spec.html"><span>Transforming input data</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#example-data">Example data</a></li><li><a href= [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/0.17.0/tutorials/tutorial-delete-data.html"><span class="arrow-prev">← </span><span>Deleting data</span></a><a class="docs-next button" href="/docs/0.17.0/tutorials/tutorial-transform-spec.html"><span>Transforming input data</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#example-data">Example data</a></li><li><a href= [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/0.17.0/tutorials/tutorial-kafka.html b/docs/0.17.0/tutorials/tutorial-kafka.html
index 05325b6..989a0b5 100644
--- a/docs/0.17.0/tutorials/tutorial-kafka.html
+++ b/docs/0.17.0/tutorials/tutorial-kafka.html
@@ -168,38 +168,32 @@ Similarly, you can also edit the spec directly and see it reflected in the previ
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"wikipedia"</span>,
-      <span class="hljs-attr">"parser"</span>: {
-        <span class="hljs-attr">"type"</span>: <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span>: {
-          <span class="hljs-attr">"format"</span>: <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"auto"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span>: {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"channel"</span>,
-              <span class="hljs-string">"cityName"</span>,
-              <span class="hljs-string">"comment"</span>,
-              <span class="hljs-string">"countryIsoCode"</span>,
-              <span class="hljs-string">"countryName"</span>,
-              <span class="hljs-string">"isAnonymous"</span>,
-              <span class="hljs-string">"isMinor"</span>,
-              <span class="hljs-string">"isNew"</span>,
-              <span class="hljs-string">"isRobot"</span>,
-              <span class="hljs-string">"isUnpatrolled"</span>,
-              <span class="hljs-string">"metroCode"</span>,
-              <span class="hljs-string">"namespace"</span>,
-              <span class="hljs-string">"page"</span>,
-              <span class="hljs-string">"regionIsoCode"</span>,
-              <span class="hljs-string">"regionName"</span>,
-              <span class="hljs-string">"user"</span>,
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"auto"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span>: {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"channel"</span>,
+          <span class="hljs-string">"cityName"</span>,
+          <span class="hljs-string">"comment"</span>,
+          <span class="hljs-string">"countryIsoCode"</span>,
+          <span class="hljs-string">"countryName"</span>,
+          <span class="hljs-string">"isAnonymous"</span>,
+          <span class="hljs-string">"isMinor"</span>,
+          <span class="hljs-string">"isNew"</span>,
+          <span class="hljs-string">"isRobot"</span>,
+          <span class="hljs-string">"isUnpatrolled"</span>,
+          <span class="hljs-string">"metroCode"</span>,
+          <span class="hljs-string">"namespace"</span>,
+          <span class="hljs-string">"page"</span>,
+          <span class="hljs-string">"regionIsoCode"</span>,
+          <span class="hljs-string">"regionName"</span>,
+          <span class="hljs-string">"user"</span>,
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [],
       <span class="hljs-attr">"granularitySpec"</span>: {
@@ -215,6 +209,9 @@ Similarly, you can also edit the spec directly and see it reflected in the previ
     },
     <span class="hljs-attr">"ioConfig"</span>: {
       <span class="hljs-attr">"topic"</span>: <span class="hljs-string">"wikipedia"</span>,
+      <span class="hljs-attr">"inputFormat"</span>: {
+        <span class="hljs-attr">"type"</span>: <span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"replicas"</span>: <span class="hljs-number">2</span>,
       <span class="hljs-attr">"taskDuration"</span>: <span class="hljs-string">"PT10M"</span>,
       <span class="hljs-attr">"completionTimeout"</span>: <span class="hljs-string">"PT20M"</span>,
diff --git a/docs/0.17.0/tutorials/tutorial-rollup.html b/docs/0.17.0/tutorials/tutorial-rollup.html
index d17da40..de7c212 100644
--- a/docs/0.17.0/tutorials/tutorial-rollup.html
+++ b/docs/0.17.0/tutorials/tutorial-rollup.html
@@ -97,25 +97,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>A file containing this sample input data is located at <code>quickstart/tutorial/rollup-data.json</code>.</p>
 <p>We'll ingest this data using the following ingestion task spec, located at <code>quickstart/tutorial/rollup-index.json</code>.</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"rollup-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span> : [
-              <span class="hljs-string">"srcIP"</span>,
-              <span class="hljs-string">"dstIP"</span>
-            ]
-          },
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
-          }
-        }
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span> : [
+          <span class="hljs-string">"srcIP"</span>,
+          <span class="hljs-string">"dstIP"</span>
+        ]
+      },
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -131,16 +125,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/tutorial"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"rollup-data.json"</span>
       },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"appendToExisting"</span> : <span class="hljs-literal">false</span>
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
       <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
     }
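 <p>To make the effect of rollup concrete, a hedged sketch: suppose three input rows share the same minute, <code>srcIP</code>, and <code>dstIP</code> (values hypothetical):</p>
 <pre><code class="hljs css language-json">{"timestamp":"2018-01-01T01:01:35Z","srcIP":"1.1.1.1","dstIP":"2.2.2.2","packets":20,"bytes":9024}
 {"timestamp":"2018-01-01T01:01:51Z","srcIP":"1.1.1.1","dstIP":"2.2.2.2","packets":255,"bytes":21133}
 {"timestamp":"2018-01-01T01:01:59Z","srcIP":"1.1.1.1","dstIP":"2.2.2.2","packets":11,"bytes":5780}
 </code></pre>
 <p>With minute <code>queryGranularity</code> and rollup enabled, these would be stored as a single row with summed <code>packets</code> and <code>bytes</code> and a <code>count</code> of 3.</p>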
diff --git a/docs/0.17.0/tutorials/tutorial-transform-spec.html b/docs/0.17.0/tutorials/tutorial-transform-spec.html
index 21c96f4..1e821cd 100644
--- a/docs/0.17.0/tutorials/tutorial-transform-spec.html
+++ b/docs/0.17.0/tutorials/tutorial-transform-spec.html
@@ -91,25 +91,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <h2><a class="anchor" aria-hidden="true" id="load-data-with-transform-specs"></a><a href="#load-data-with-transform-specs" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 1 [...]
 <p>We will ingest the sample data using the following spec, which demonstrates the use of transform specs:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"transform-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span> : [
-              <span class="hljs-string">"animal"</span>,
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"location"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
-            ]
-          },
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span> : [
+          <span class="hljs-string">"animal"</span>,
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"location"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -147,16 +141,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/tutorial"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"transform-data.json"</span>
       },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> :<span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"appendToExisting"</span> : <span class="hljs-literal">false</span>
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
       <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
     }
diff --git a/docs/latest/design/architecture.html b/docs/latest/design/architecture.html
index c12caa7..97f3bd6 100644
--- a/docs/latest/design/architecture.html
+++ b/docs/latest/design/architecture.html
@@ -222,7 +222,7 @@ or a Historical process.</li>
 <p>You can inspect the state of currently active segments using the Druid SQL
 <a href="/docs/latest/querying/sql.html#segments-table"><code>sys.segments</code> table</a>. It includes the following flags:</p>
 <ul>
-<li><code>is_published</code>: True if segment metadata has been published to the metadata store and <code>used</code> is true.</li>
+<li><code>is_published</code>: True if segment metadata has been published to the metadata store and <code>used</code> is true.</li>
 <li><code>is_available</code>: True if the segment is currently available for querying, either on a realtime task or Historical
 process.</li>
 <li><code>is_realtime</code>: True if the segment is <em>only</em> available on realtime tasks. For datasources that use realtime ingestion,
diff --git a/docs/latest/development/extensions-core/avro.html b/docs/latest/development/extensions-core/avro.html
index 8fe1a1c..11f6cf2 100644
--- a/docs/latest/development/extensions-core/avro.html
+++ b/docs/latest/development/extensions-core/avro.html
@@ -77,12 +77,13 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
-<p>This Apache Druid extension enables Druid to ingest and understand the Apache Avro data format. Make sure to <a href="/docs/latest/development/extensions.html#loading-extensions">include</a> <code>druid-avro-extensions</code> as an extension.</p>
-<p>The <code>druid-avro-extensions</code> provides two Avro Parsers for stream ingestion and Hadoop batch ingestion.
-See <a href="/docs/latest/ingestion/data-formats.html#avro-hadoop-parser">Avro Hadoop Parser</a>
-and <a href="/docs/latest/ingestion/data-formats.html#avro-stream-parser">Avro Stream Parser</a>
-for details.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/approximate-histograms.html"><span class="arrow-prev">← </span><span>Approximate Histogram aggregators</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/bloom-filter.html"><span>Bloom Filter</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"></nav></div><footer class="nav-footer druid-footer [...]
+<h2><a class="anchor" aria-hidden="true" id="avro-extension"></a><a href="#avro-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0- [...]
+<p>This Apache Druid extension enables Druid to ingest and understand the Apache Avro data format. This extension provides
+two Avro Parsers for stream ingestion and Hadoop batch ingestion.
+See <a href="/docs/latest/ingestion/data-formats.html#avro-hadoop-parser">Avro Hadoop Parser</a> and <a href="/docs/latest/ingestion/data-formats.html#avro-stream-parser">Avro Stream Parser</a>
+for more details about how to use these in an ingestion spec.</p>
+<p>Make sure to <a href="/docs/latest/development/extensions.html#loading-extensions">include</a> <code>druid-avro-extensions</code> as an extension.</p>
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/approximate-histograms.html"><span class="arrow-prev">← </span><span>Approximate Histogram aggregators</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/bloom-filter.html"><span>Bloom Filter</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#avro-extens [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/latest/development/extensions-core/google.html b/docs/latest/development/extensions-core/google.html
index 883dedd..c7d37ac 100644
--- a/docs/latest/development/extensions-core/google.html
+++ b/docs/latest/development/extensions-core/google.html
@@ -77,10 +77,24 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
+<h2><a class="anchor" aria-hidden="true" id="google-cloud-storage-extension"></a><a href="#google-cloud-storage-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 1 [...]
+<p>This extension allows you to do two things:</p>
+<ul>
+<li><a href="#reading-data-from-google-cloud-storage">Ingest data</a> from files stored in Google Cloud Storage.</li>
+<li>Write segments to <a href="#deep-storage">deep storage</a> in Google Cloud Storage.</li>
+</ul>
 <p>To use this Apache Druid extension, make sure to <a href="/docs/latest/development/extensions.html#loading-extensions">include</a> <code>druid-google-extensions</code> extension.</p>
-<h2><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
+<h3><a class="anchor" aria-hidden="true" id="required-configuration"></a><a href="#required-configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
+<p>To configure connectivity to Google Cloud, run Druid processes with <code>GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account_keyfile</code> in the environment.</p>
+<h3><a class="anchor" aria-hidden="true" id="reading-data-from-google-cloud-storage"></a><a href="#reading-data-from-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
+<p>The <a href="/docs/latest/ingestion/native-batch.html#google-cloud-storage-input-source">Google Cloud Storage input source</a> is supported by the <a href="/docs/latest/ingestion/native-batch.html#parallel-task">Parallel task</a>
+to read objects directly from Google Cloud Storage. If you use the <a href="/docs/latest/ingestion/hadoop.html">Hadoop task</a>,
+you can read data from Google Cloud Storage by specifying the paths in your <a href="/docs/latest/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
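+<p>For illustration, a minimal <code>ioConfig</code> fragment for the Parallel task reading JSON objects from Google Cloud Storage might look like the sketch below; the bucket and prefix are placeholders:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "type": "index_parallel",
+  "inputSource": {
+    "type": "google",
+    "prefixes": ["gs://your-bucket/your-prefix/"]
+  },
+  "inputFormat": {
+    "type": "json"
+  }
+}
+</code></pre>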
+<p>Objects can also be read directly from Google Cloud Storage via the <a href="/docs/latest/ingestion/native-batch.html#staticgoogleblobstorefirehose">StaticGoogleBlobStoreFirehose</a>.</p>
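+<p>As a rough sketch (the bucket and path are placeholders), such a firehose is configured like this:</p>
+<pre><code class="hljs css language-json">"firehose": {
+  "type": "static-google-blobstore",
+  "blobs": [
+    { "bucket": "your-bucket", "path": "/path/to/your-file.json" }
+  ]
+}
+</code></pre>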
+<h3><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
 <p>Deep storage can be written to Google Cloud Storage either via this extension or the <a href="/docs/latest/development/extensions-core/hdfs.html">druid-hdfs-storage extension</a>.</p>
-<h3><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
+<h4><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
+<p>To configure connectivity to Google Cloud, run Druid processes with <code>GOOGLE_APPLICATION_CREDENTIALS=/path/to/service_account_keyfile</code> in the environment.</p>
 <table>
 <thead>
 <tr><th>Property</th><th>Possible Values</th><th>Description</th><th>Default</th></tr>
@@ -91,11 +105,7 @@
 <tr><td><code>druid.google.prefix</code></td><td></td><td>GCS prefix.</td><td>No-prefix</td></tr>
 </tbody>
 </table>
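+<p>For example, a minimal Google Cloud Storage deep storage configuration in <code>common.runtime.properties</code> might look like the sketch below (the bucket and prefix are placeholders):</p>
+<pre><code class="hljs"># Sketch only: substitute your own bucket and prefix
+druid.storage.type=google
+druid.google.bucket=your-bucket
+druid.google.prefix=druid/segments
+</code></pre>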
-<h2><a class="anchor" aria-hidden="true" id="reading-data-from-google-cloud-storage"></a><a href="#reading-data-from-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
-<p>The <a href="/docs/latest/ingestion/native-batch.html#google-cloud-storage-input-source">Google Cloud Storage input source</a> is supported by the <a href="/docs/latest/ingestion/native-batch.html#parallel-task">Parallel task</a>
-to read objects directly from Google Cloud Storage. If you use the <a href="/docs/latest/ingestion/hadoop.html">Hadoop task</a>,
-you can read data from Google Cloud Storage by specifying the paths in your <a href="/docs/latest/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/druid-lookups.html"><span class="arrow-prev">← </span><span>Cached Lookup Module</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/hdfs.html"><span>HDFS</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Deep Storage</a><ul class="toc-head [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/druid-lookups.html"><span class="arrow-prev">← </span><span>Cached Lookup Module</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/hdfs.html"><span>HDFS</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#google-cloud-storage-extension">Google Cloud Stor [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/latest/development/extensions-core/hdfs.html b/docs/latest/development/extensions-core/hdfs.html
index 7a43a36..d353ed7 100644
--- a/docs/latest/development/extensions-core/hdfs.html
+++ b/docs/latest/development/extensions-core/hdfs.html
@@ -91,26 +91,123 @@
 <tr><td><code>druid.hadoop.security.kerberos.keytab</code></td><td><code>/etc/security/keytabs/druid.headlessUser.keytab</code></td><td>Path to keytab file</td><td>empty</td></tr>
 </tbody>
 </table>
-<p>If you are using the Hadoop indexer, set your output directory to be a location on Hadoop and it will work.
+<p>Besides the above settings, you also need to include all Hadoop configuration files (such as <code>core-site.xml</code>, <code>hdfs-site.xml</code>)
+in the Druid classpath. One way to do this is to copy those files into <code>${DRUID_HOME}/conf/_common</code>.</p>
+<p>If you are using Hadoop ingestion, set your output directory to be a location on Hadoop and it will work.
 If you want to eagerly authenticate against a secured hadoop/hdfs cluster, you must set <code>druid.hadoop.security.kerberos.principal</code> and <code>druid.hadoop.security.kerberos.keytab</code>. This is an alternative to the cron job method that runs the <code>kinit</code> command periodically.</p>
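+<p>For example, eager Kerberos authentication could be enabled with properties like the following sketch (the principal and keytab path are placeholders; the keytab path echoes the example in the table above):</p>
+<pre><code class="hljs"># Placeholders: substitute your own principal and keytab location
+druid.hadoop.security.kerberos.principal=druid@EXAMPLE.COM
+druid.hadoop.security.kerberos.keytab=/etc/security/keytabs/druid.headlessUser.keytab
+</code></pre>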
-<h3><a class="anchor" aria-hidden="true" id="configuration-for-google-cloud-storage"></a><a href="#configuration-for-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
-<p>The HDFS extension can also be used for GCS as deep storage.</p>
+<h3><a class="anchor" aria-hidden="true" id="configuration-for-cloud-storage"></a><a href="#configuration-for-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 [...]
+<p>You can also use AWS S3 or Google Cloud Storage as deep storage via HDFS.</p>
+<h4><a class="anchor" aria-hidden="true" id="configuration-for-aws-s3"></a><a href="#configuration-for-aws-s3" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-. [...]
+<p>To use AWS S3 as deep storage, you need to configure <code>druid.storage.storageDirectory</code> properly.</p>
 <table>
 <thead>
 <tr><th>Property</th><th>Possible Values</th><th>Description</th><th>Default</th></tr>
 </thead>
 <tbody>
 <tr><td><code>druid.storage.type</code></td><td>hdfs</td><td></td><td>Must be set.</td></tr>
-<tr><td><code>druid.storage.storageDirectory</code></td><td></td><td><a href="gs://bucket/example/directory">gs://bucket/example/directory</a></td><td>Must be set.</td></tr>
+<tr><td><code>druid.storage.storageDirectory</code></td><td><a href="s3a://bucket/example/directory">s3a://bucket/example/directory</a> or <a href="s3n://bucket/example/directory">s3n://bucket/example/directory</a></td><td>Path to the deep storage</td><td>Must be set.</td></tr>
 </tbody>
 </table>
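+<p>For example, the matching entries in <code>common.runtime.properties</code> might look like this sketch (the bucket and directory are placeholders):</p>
+<pre><code class="hljs"># Sketch only: substitute your own bucket and directory
+druid.storage.type=hdfs
+druid.storage.storageDirectory=s3a://your-bucket/druid/segments
+</code></pre>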
-<p>All services that need to access GCS need to have the <a href="https://cloud.google.com/hadoop/google-cloud-storage-connector#manualinstallation">GCS connector jar</a> in their class path. One option is to place this jar in <druid>/lib/ and <druid>/extensions/druid-hdfs-storage/</p>
-<p>Tested with Druid 0.9.0, Hadoop 2.7.2 and gcs-connector jar 1.4.4-hadoop2.</p>
-<h2><a class="anchor" aria-hidden="true" id="reading-data-from-hdfs"></a><a href="#reading-data-from-hdfs" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
+<p>You also need to include the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a>, specifically the <code>hadoop-aws.jar</code>, in the Druid classpath.
+Run the command below to install the <code>hadoop-aws.jar</code> file under <code>${DRUID_HOME}/extensions/druid-hdfs-storage</code> on all nodes.</p>
+<pre><code class="hljs css language-bash">java -classpath <span class="hljs-string">"<span class="hljs-variable">${DRUID_HOME}</span>lib/*"</span> org.apache.druid.cli.Main tools pull-deps -h <span class="hljs-string">"org.apache.hadoop:hadoop-aws:<span class="hljs-variable">${HADOOP_VERSION}</span>"</span>;
+cp <span class="hljs-variable">${DRUID_HOME}</span>/hadoop-dependencies/hadoop-aws/<span class="hljs-variable">${HADOOP_VERSION}</span>/hadoop-aws-<span class="hljs-variable">${HADOOP_VERSION}</span>.jar <span class="hljs-variable">${DRUID_HOME}</span>/extensions/druid-hdfs-storage/
+</code></pre>
+<p>Finally, you need to add the following properties to <code>core-site.xml</code>.
+For more configuration options, see the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a> documentation.</p>
+<pre><code class="hljs css language-xml"><span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.s3a.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>org.apache.hadoop.fs.s3a.S3AFileSystem<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The implementation class of the S3A Filesystem<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.AbstractFileSystem.s3a.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>org.apache.hadoop.fs.s3a.S3A<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The implementation class of the S3A AbstractFileSystem.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.s3a.access.key<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>AWS access key ID. Omit for IAM role-based or provider-based authentication.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>your access key<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.s3a.secret.key<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>AWS secret key. Omit for IAM role-based or provider-based authentication.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>your secret key<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+</code></pre>
+<h4><a class="anchor" aria-hidden="true" id="configuration-for-google-cloud-storage"></a><a href="#configuration-for-google-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1. [...]
+<p>To use Google Cloud Storage as deep storage, you need to configure <code>druid.storage.storageDirectory</code> properly.</p>
+<table>
+<thead>
+<tr><th>Property</th><th>Possible Values</th><th>Description</th><th>Default</th></tr>
+</thead>
+<tbody>
+<tr><td><code>druid.storage.type</code></td><td>hdfs</td><td></td><td>Must be set.</td></tr>
+<tr><td><code>druid.storage.storageDirectory</code></td><td><a href="gs://bucket/example/directory">gs://bucket/example/directory</a></td><td>Path to the deep storage</td><td>Must be set.</td></tr>
+</tbody>
+</table>
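+<p>As with S3, a sketch of the corresponding properties (the bucket and directory are placeholders):</p>
+<pre><code class="hljs"># Sketch only: substitute your own bucket and directory
+druid.storage.type=hdfs
+druid.storage.storageDirectory=gs://your-bucket/druid/segments
+</code></pre>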
+<p>All services that need to access GCS must have the <a href="https://cloud.google.com/dataproc/docs/concepts/connectors/cloud-storage#other_sparkhadoop_clusters">GCS connector jar</a> on their classpath.
+Please read the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md">install instructions</a>
+to properly set up the necessary libraries and configurations.
+One option is to place this jar in <code>${DRUID_HOME}/lib/</code> and <code>${DRUID_HOME}/extensions/druid-hdfs-storage/</code>.</p>
+<p>Finally, you need to configure the <code>core-site.xml</code> file with the filesystem
+and authentication properties needed for GCS. You may want to copy the example
+properties below. Please follow the instructions at
+<a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md">https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md</a>
+for more details.
+For more configuration options, see the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/conf/gcs-core-default.xml">GCS core default</a>
+and the <a href="https://github.com/GoogleCloudPlatform/bdutil/blob/master/conf/hadoop2/gcs-core-template.xml">GCS core template</a>.</p>
+<pre><code class="hljs css language-xml"><span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.gs.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The FileSystem for gs: (GCS) uris.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>fs.AbstractFileSystem.gs.impl<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>The AbstractFileSystem for gs: uris.<span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>google.cloud.auth.service.account.enable<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>true<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>
+    Whether to use a service account for GCS authorization.
+    Setting this property to `false` will disable use of service accounts for
+    authentication.
+  <span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+
+<span class="hljs-tag">&lt;<span class="hljs-name">property</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">name</span>&gt;</span>google.cloud.auth.service.account.json.keyfile<span class="hljs-tag">&lt;/<span class="hljs-name">name</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">value</span>&gt;</span>/path/to/keyfile<span class="hljs-tag">&lt;/<span class="hljs-name">value</span>&gt;</span>
+  <span class="hljs-tag">&lt;<span class="hljs-name">description</span>&gt;</span>
+    The JSON key file of the service account used for GCS
+    access when google.cloud.auth.service.account.enable is true.
+  <span class="hljs-tag">&lt;/<span class="hljs-name">description</span>&gt;</span>
+<span class="hljs-tag">&lt;/<span class="hljs-name">property</span>&gt;</span>
+</code></pre>
+<p>Tested with Druid 0.17.0, Hadoop 2.8.5 and gcs-connector jar 2.0.0-hadoop2.</p>
+<h2><a class="anchor" aria-hidden="true" id="reading-data-from-hdfs-or-cloud-storage"></a><a href="#reading-data-from-hdfs-or-cloud-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2  [...]
+<h3><a class="anchor" aria-hidden="true" id="native-batch-ingestion"></a><a href="#native-batch-ingestion" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
 <p>The <a href="/docs/latest/ingestion/native-batch.html#hdfs-input-source">HDFS input source</a> is supported by the <a href="/docs/latest/ingestion/native-batch.html#parallel-task">Parallel task</a>
-to read objects directly from HDFS Storage. If you use the <a href="/docs/latest/ingestion/hadoop.html">Hadoop task</a>,
-you can read data from HDFS by specifying the paths in your <a href="/docs/latest/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/google.html"><span class="arrow-prev">← </span><span>Google Cloud Storage</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/kafka-extraction-namespace.html"><span>Apache Kafka Lookups</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Deep [...]
+to read files directly from HDFS. You may be able to read objects from cloud storage
+with the HDFS input source, but we highly recommend using a proper
+<a href="/docs/latest/ingestion/native-batch.html#input-sources">Input Source</a> instead if possible because
+it is simpler to set up. For now, only the <a href="/docs/latest/ingestion/native-batch.html#s3-input-source">S3 input source</a>
+and the <a href="/docs/latest/ingestion/native-batch.html#google-cloud-storage-input-source">Google Cloud Storage input source</a>
+are supported for cloud storage types, so you may still want to use the HDFS input source
+to read from cloud storage other than those two.</p>
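+<p>A minimal sketch of the HDFS input source inside a Parallel task <code>ioConfig</code> (the namenode address and path are placeholders):</p>
+<pre><code class="hljs css language-json">"inputSource": {
+  "type": "hdfs",
+  "paths": "hdfs://namenode:8020/path/to/data/*"
+}
+</code></pre>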
+<h3><a class="anchor" aria-hidden="true" id="hadoop-based-ingestion"></a><a href="#hadoop-based-ingestion" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
+<p>If you use <a href="/docs/latest/ingestion/hadoop.html">Hadoop ingestion</a>, you can read data from HDFS
+by specifying the paths in your <a href="/docs/latest/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.
+See the <a href="/docs/latest/ingestion/hadoop.html#static">Static</a> inputSpec for details.</p>
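+<p>For reference, a static <code>inputSpec</code> reading from HDFS might look like the sketch below (the namenode address and paths are placeholders):</p>
+<pre><code class="hljs css language-json">"inputSpec": {
+  "type": "static",
+  "paths": "hdfs://namenode:8020/path/to/data1,hdfs://namenode:8020/path/to/data2"
+}
+</code></pre>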
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/google.html"><span class="arrow-prev">← </span><span>Google Cloud Storage</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/kafka-extraction-namespace.html"><span>Apache Kafka Lookups</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Deep [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/latest/development/extensions-core/kafka-ingestion.html b/docs/latest/development/extensions-core/kafka-ingestion.html
index 68654b7..a1cfb97 100644
--- a/docs/latest/development/extensions-core/kafka-ingestion.html
+++ b/docs/latest/development/extensions-core/kafka-ingestion.html
@@ -269,7 +269,7 @@ MiddleManagers. A supervisor for a dataSource is started by submitting a supervi
 </tbody>
 </table>
 <h4><a class="anchor" aria-hidden="true" id="specifying-data-format"></a><a href="#specifying-data-format" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
-<p>Kafka indexing service supports both <a href="/docs/latest/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/latest/ingestion/data-formats.html#parser-deprecated"><code>parser</code></a> to specify the data format.
+<p>Kafka indexing service supports both <a href="/docs/latest/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/latest/ingestion/data-formats.html#parser"><code>parser</code></a> to specify the data format.
 The <code>inputFormat</code> is a new and recommended way to specify the data format for Kafka indexing service,
 but unfortunately, it doesn't support all data formats supported by the legacy <code>parser</code>.
 (They will be supported in the future.)</p>
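+<p>For example, a supervisor reading JSON messages would declare the format in its <code>ioConfig</code> as in the sketch below; the topic name is a placeholder and the other required <code>ioConfig</code> fields are omitted:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "topic": "your-topic",
+  "inputFormat": {
+    "type": "json"
+  }
+}
+</code></pre>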
diff --git a/docs/latest/development/extensions-core/kinesis-ingestion.html b/docs/latest/development/extensions-core/kinesis-ingestion.html
index 419b9b6..2d04cfc 100644
--- a/docs/latest/development/extensions-core/kinesis-ingestion.html
+++ b/docs/latest/development/extensions-core/kinesis-ingestion.html
@@ -272,7 +272,7 @@ and the MiddleManagers. A supervisor for a dataSource is started by submitting a
 </tbody>
 </table>
 <h4><a class="anchor" aria-hidden="true" id="specifying-data-format"></a><a href="#specifying-data-format" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
-<p>Kinesis indexing service supports both <a href="/docs/latest/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/latest/ingestion/data-formats.html#parser-deprecated"><code>parser</code></a> to specify the data format.
+<p>Kinesis indexing service supports both <a href="/docs/latest/ingestion/data-formats.html#input-format"><code>inputFormat</code></a> and <a href="/docs/latest/ingestion/data-formats.html#parser"><code>parser</code></a> to specify the data format.
 The <code>inputFormat</code> is a new and recommended way to specify the data format for Kinesis indexing service,
 but unfortunately, it doesn't support all data formats supported by the legacy <code>parser</code>.
 (They will be supported in the future.)</p>
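+<p>For example, a supervisor reading CSV records could declare the format in its <code>ioConfig</code> as in the sketch below; the stream name and column list are placeholders and the other required <code>ioConfig</code> fields are omitted:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "stream": "your-stream",
+  "inputFormat": {
+    "type": "csv",
+    "columns": ["timestamp", "dim1", "metric1"]
+  }
+}
+</code></pre>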
diff --git a/docs/latest/development/extensions-core/mysql.html b/docs/latest/development/extensions-core/mysql.html
index 06ab20b..14437e9 100644
--- a/docs/latest/development/extensions-core/mysql.html
+++ b/docs/latest/development/extensions-core/mysql.html
@@ -154,7 +154,7 @@ with the location (host name and port) of the database.</p>
 <h3><a class="anchor" aria-hidden="true" id="mysql-firehose"></a><a href="#mysql-firehose" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0- [...]
 <p>The MySQL extension provides an implementation of an <a href="/docs/latest/ingestion/native-batch.html#firehoses-deprecated">SqlFirehose</a> which can be used to ingest data into Druid from a MySQL database.</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span>: {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"some_datasource"</span>,
@@ -191,7 +191,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"ioConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"firehose"</span>: {
         <span class="hljs-attr">"type"</span>: <span class="hljs-string">"sql"</span>,
         <span class="hljs-attr">"database"</span>: {
@@ -208,7 +208,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"tuningconfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>
     }
   }
 }
diff --git a/docs/latest/development/extensions-core/orc.html b/docs/latest/development/extensions-core/orc.html
index 7ef0a32..75de56e 100644
--- a/docs/latest/development/extensions-core/orc.html
+++ b/docs/latest/development/extensions-core/orc.html
@@ -77,12 +77,12 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
-<p>This Apache Druid module extends <a href="/docs/latest/ingestion/hadoop.html">Druid Hadoop based indexing</a> to ingest data directly from offline
-Apache ORC files.</p>
-<p>To use this extension, make sure to <a href="/docs/latest/development/extensions.html#loading-extensions">include</a> <code>druid-orc-extensions</code>.</p>
-<p>The <code>druid-orc-extensions</code> provides the <a href="/docs/latest/ingestion/data-formats.html#orc">ORC input format</a> and the <a href="/docs/latest/ingestion/data-formats.html#orc-hadoop-parser">ORC Hadoop parser</a>
+<h2><a class="anchor" aria-hidden="true" id="orc-extension"></a><a href="#orc-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
+<p>This Apache Druid extension enables Druid to ingest and understand the Apache ORC data format.</p>
+<p>The extension provides the <a href="/docs/latest/ingestion/data-formats.html#orc">ORC input format</a> and the <a href="/docs/latest/ingestion/data-formats.html#orc-hadoop-parser">ORC Hadoop parser</a>
 for <a href="/docs/latest/ingestion/native-batch.html">native batch ingestion</a> and <a href="/docs/latest/ingestion/hadoop.html">Hadoop batch ingestion</a>, respectively.
 Please see corresponding docs for details.</p>
+<p>To use this extension, make sure to <a href="/docs/latest/development/extensions.html#loading-extensions">include</a> <code>druid-orc-extensions</code>.</p>
 <h3><a class="anchor" aria-hidden="true" id="migration-from-contrib-extension"></a><a href="#migration-from-contrib-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13. [...]
 <p>This extension, first available in version 0.15.0, replaces the previous 'contrib' extension which was available until
 0.14.0-incubating. While this extension can index any data the 'contrib' extension could, the JSON spec for the
@@ -130,7 +130,7 @@ for a dimension <code>nestedData_dim1</code>, to preserve Druid schema could be
  ...
 }
 </code></pre>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/mysql.html"><span class="arrow-prev">← </span><span class="function-name-prevnext">MySQL Metadata Store</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/parquet.html"><span>Apache Parquet Extension</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"></nav></div><footer class="nav-footer drui [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/mysql.html"><span class="arrow-prev">← </span><span class="function-name-prevnext">MySQL Metadata Store</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/parquet.html"><span>Apache Parquet Extension</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#orc [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/latest/development/extensions-core/postgresql.html b/docs/latest/development/extensions-core/postgresql.html
index 918c3e2..9ce05df 100644
--- a/docs/latest/development/extensions-core/postgresql.html
+++ b/docs/latest/development/extensions-core/postgresql.html
@@ -135,7 +135,7 @@ with the location (host name and port) of the database.</p>
 <h3><a class="anchor" aria-hidden="true" id="postgresql-firehose"></a><a href="#postgresql-firehose" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.2 [...]
 <p>The PostgreSQL extension provides an implementation of an <a href="/docs/latest/ingestion/native-batch.html#firehoses-deprecated">SqlFirehose</a> which can be used to ingest data into Druid from a PostgreSQL database.</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span>: {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"some_datasource"</span>,
@@ -172,7 +172,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"ioConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"firehose"</span>: {
         <span class="hljs-attr">"type"</span>: <span class="hljs-string">"sql"</span>,
         <span class="hljs-attr">"database"</span>: {
@@ -189,7 +189,7 @@ with the location (host name and port) of the database.</p>
       }
     },
     <span class="hljs-attr">"tuningconfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index"</span>
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>
     }
   }
 }
diff --git a/docs/latest/development/extensions-core/s3.html b/docs/latest/development/extensions-core/s3.html
index ce1617b..6cd8d2a 100644
--- a/docs/latest/development/extensions-core/s3.html
+++ b/docs/latest/development/extensions-core/s3.html
@@ -77,50 +77,39 @@
   ~ specific language governing permissions and limitations
   ~ under the License.
   -->
+<h2><a class="anchor" aria-hidden="true" id="s3-extension"></a><a href="#s3-extension" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
+<p>This extension allows you to do two things:</p>
+<ul>
+<li><a href="#reading-data-from-s3">Ingest data</a> from files stored in S3.</li>
+<li>Write segments to <a href="#deep-storage">deep storage</a> in S3.</li>
+</ul>
 <p>To use this Apache Druid extension, make sure to <a href="/docs/latest/development/extensions.html#loading-extensions">include</a> <code>druid-s3-extensions</code> as an extension.</p>
-<h2><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
+<h3><a class="anchor" aria-hidden="true" id="reading-data-from-s3"></a><a href="#reading-data-from-s3" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
+<p>The <a href="/docs/latest/ingestion/native-batch.html#s3-input-source">S3 input source</a> is supported by the <a href="/docs/latest/ingestion/native-batch.html#parallel-task">Parallel task</a>
+to read objects directly from S3. If you use the <a href="/docs/latest/ingestion/hadoop.html">Hadoop task</a>,
+you can read data from S3 by specifying the S3 paths in your <a href="/docs/latest/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
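+<p>A minimal sketch of the S3 input source inside a Parallel task <code>ioConfig</code> (the bucket and prefix are placeholders):</p>
+<pre><code class="hljs css language-json">"inputSource": {
+  "type": "s3",
+  "prefixes": ["s3://your-bucket/your-prefix/"]
+}
+</code></pre>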
+<p>To allow the extension to read objects from S3, you need to configure how to <a href="#configuration">connect to S3</a>.</p>
+<h3><a class="anchor" aria-hidden="true" id="deep-storage"></a><a href="#deep-storage" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83. [...]
 <p>S3-compatible deep storage means either AWS S3 or a compatible service like Google Storage which exposes the same API as S3.</p>
-<h3><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
 <p>S3 deep storage needs to be explicitly enabled by setting <code>druid.storage.type=s3</code>. <strong>Only after setting the storage type to S3 will any of the settings below take effect.</strong></p>
-<p>The AWS SDK requires that the target region be specified. Two ways of doing this are by using the JVM system property <code>aws.region</code> or the environment variable <code>AWS_REGION</code>.</p>
-<p>As an example, to set the region to 'us-east-1' through system properties:</p>
-<ul>
-<li>Add <code>-Daws.region=us-east-1</code> to the jvm.config file for all Druid services.</li>
-<li>Add <code>-Daws.region=us-east-1</code> to <code>druid.indexer.runner.javaOpts</code> in middleManager/runtime.properties so that the property will be passed to Peon (worker) processes.</li>
-</ul>
+<p>To correctly configure this extension for deep storage in S3, first configure how to <a href="#configuration">connect to S3</a>.
+In addition, you need to set additional configuration specific to <a href="#deep-storage-specific-configuration">deep storage</a>.</p>
+<h4><a class="anchor" aria-hidden="true" id="deep-storage-specific-configuration"></a><a href="#deep-storage-specific-configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2 [...]
 <table>
 <thead>
 <tr><th>Property</th><th>Description</th><th>Default</th></tr>
 </thead>
 <tbody>
-<tr><td><code>druid.s3.accessKey</code></td><td>S3 access key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
-<tr><td><code>druid.s3.secretKey</code></td><td>S3 secret key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
-<tr><td><code>druid.s3.fileSessionCredentials</code></td><td>Path to properties file containing <code>sessionToken</code>, <code>accessKey</code> and <code>secretKey</code> value. One key/value pair per line (format <code>key=value</code>). See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details</td><td>Can be omitted according to authentication methods chosen.</td></tr>
-<tr><td><code>druid.s3.protocol</code></td><td>Communication protocol type to use when sending requests to AWS. <code>http</code> or <code>https</code> can be used. This configuration would be ignored if <code>druid.s3.endpoint.url</code> is filled with a URL with a different protocol.</td><td><code>https</code></td></tr>
-<tr><td><code>druid.s3.disableChunkedEncoding</code></td><td>Disables chunked encoding. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#disableChunkedEncoding--">AWS document</a> for details.</td><td>false</td></tr>
-<tr><td><code>druid.s3.enablePathStyleAccess</code></td><td>Enables path style access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#enablePathStyleAccess--">AWS document</a> for details.</td><td>false</td></tr>
-<tr><td><code>druid.s3.forceGlobalBucketAccessEnabled</code></td><td>Enables global bucket access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#setForceGlobalBucketAccessEnabled-java.lang.Boolean-">AWS document</a> for details.</td><td>false</td></tr>
-<tr><td><code>druid.s3.endpoint.url</code></td><td>Service endpoint either with or without the protocol.</td><td>None</td></tr>
-<tr><td><code>druid.s3.endpoint.signingRegion</code></td><td>Region to use for SigV4 signing of requests (e.g. us-west-1).</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.host</code></td><td>Proxy host to connect through.</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.port</code></td><td>Port on the proxy host to connect through.</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.username</code></td><td>User name to use when connecting through a proxy.</td><td>None</td></tr>
-<tr><td><code>druid.s3.proxy.password</code></td><td>Password to use when connecting through a proxy.</td><td>None</td></tr>
 <tr><td><code>druid.storage.bucket</code></td><td>Bucket to store in.</td><td>Must be set.</td></tr>
 <tr><td><code>druid.storage.baseKey</code></td><td>Base key prefix to use, i.e. what directory.</td><td>Must be set.</td></tr>
+<tr><td><code>druid.storage.type</code></td><td>Global deep storage provider. Must be set to <code>s3</code> to make use of this extension.</td><td>Must be set (likely <code>s3</code>).</td></tr>
 <tr><td><code>druid.storage.archiveBucket</code></td><td>S3 bucket name for archiving when running the <em>archive task</em>.</td><td>none</td></tr>
 <tr><td><code>druid.storage.archiveBaseKey</code></td><td>S3 object key prefix for archiving.</td><td>none</td></tr>
 <tr><td><code>druid.storage.disableAcl</code></td><td>Boolean flag to disable ACL. If this is set to <code>false</code>, the full control would be granted to the bucket owner. This may require to set additional permissions. See <a href="#s3-permissions-settings">S3 permissions settings</a>.</td><td>false</td></tr>
-<tr><td><code>druid.storage.sse.type</code></td><td>Server-side encryption type. Should be one of <code>s3</code>, <code>kms</code>, and <code>custom</code>. See the below <a href="#server-side-encryption">Server-side encryption section</a> for more details.</td><td>None</td></tr>
-<tr><td><code>druid.storage.sse.kms.keyId</code></td><td>AWS KMS key ID. This is used only when <code>druid.storage.sse.type</code> is <code>kms</code> and can be empty to use the default key ID.</td><td>None</td></tr>
-<tr><td><code>druid.storage.sse.custom.base64EncodedKey</code></td><td>Base64-encoded key. Should be specified if <code>druid.storage.sse.type</code> is <code>custom</code>.</td><td>None</td></tr>
-<tr><td><code>druid.storage.type</code></td><td>Global deep storage provider. Must be set to <code>s3</code> to make use of this extension.</td><td>Must be set (likely <code>s3</code>).</td></tr>
 <tr><td><code>druid.storage.useS3aSchema</code></td><td>If true, use the &quot;s3a&quot; filesystem when using Hadoop-based ingestion. If false, the &quot;s3n&quot; filesystem will be used. Only affects Hadoop-based ingestion.</td><td>false</td></tr>
 </tbody>
 </table>
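+<p>For example, a sketch of the deep storage entries in <code>common.runtime.properties</code> (the bucket and base key are placeholders; credentials are covered by the <a href="#s3-authentication-methods">authentication methods</a> below):</p>
+<pre><code class="hljs"># Sketch only: substitute your own bucket and base key
+druid.storage.type=s3
+druid.storage.bucket=your-bucket
+druid.storage.baseKey=druid/segments
+</code></pre>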
-<h3><a class="anchor" aria-hidden="true" id="s3-permissions-settings"></a><a href="#s3-permissions-settings" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 [...]
-<p><code>s3:GetObject</code> and <code>s3:PutObject</code> are basically required for pushing/loading segments to/from S3.
-If <code>druid.storage.disableAcl</code> is set to <code>false</code>, then <code>s3:GetBucketAcl</code> and <code>s3:PutObjectAcl</code> are additionally required to set ACL for objects.</p>
+<h2><a class="anchor" aria-hidden="true" id="configuration"></a><a href="#configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
 <h3><a class="anchor" aria-hidden="true" id="s3-authentication-methods"></a><a href="#s3-authentication-methods" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c [...]
 <p>To connect to your S3 bucket (whether deep storage bucket or source bucket), Druid uses the following credentials provider chain.</p>
 <table>
@@ -140,6 +129,40 @@ If <code>druid.storage.disableAcl</code> is set to <code>false</code>, then <cod
 <p>You can find more information about authentication methods <a href="https://docs.aws.amazon.com/fr_fr/sdk-for-java/v1/developer-guide/credentials.html">here</a>.<br/>
 <strong>Note:</strong> <em>The order is important here, as it indicates the precedence of authentication methods.<br/>
 If you want to use instance profile credentials, you <strong>must not</strong> set <code>druid.s3.accessKey</code> and <code>druid.s3.secretKey</code> in your Druid runtime.properties.</em></p>
+<h3><a class="anchor" aria-hidden="true" id="s3-permissions-settings"></a><a href="#s3-permissions-settings" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 [...]
+<p><code>s3:GetObject</code> and <code>s3:PutObject</code> are required for pushing segments to and loading segments from S3.
+If <code>druid.storage.disableAcl</code> is set to <code>false</code>, then <code>s3:GetBucketAcl</code> and <code>s3:PutObjectAcl</code> are additionally required to set the ACL for objects.</p>
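+<p>As an illustrative sketch only, an IAM policy granting these permissions might look like the following (the bucket name is a placeholder):</p>
+<pre><code class="hljs css language-json">{
+  "Version": "2012-10-17",
+  "Statement": [
+    {
+      "Effect": "Allow",
+      "Action": ["s3:GetObject", "s3:PutObject", "s3:GetBucketAcl", "s3:PutObjectAcl"],
+      "Resource": ["arn:aws:s3:::your-druid-bucket", "arn:aws:s3:::your-druid-bucket/*"]
+    }
+  ]
+}
+</code></pre>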
+<h3><a class="anchor" aria-hidden="true" id="aws-region"></a><a href="#aws-region" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1 [...]
+<p>The AWS SDK requires that the target region be specified. You can do this either with the JVM system property <code>aws.region</code> or with the environment variable <code>AWS_REGION</code>.</p>
+<p>As an example, to set the region to 'us-east-1' through system properties:</p>
+<ul>
+<li>Add <code>-Daws.region=us-east-1</code> to the jvm.config file for all Druid services.</li>
+<li>Add <code>-Daws.region=us-east-1</code> to <code>druid.indexer.runner.javaOpts</code> in <a href="/docs/latest/configuration/index.html#middlemanager-configuration">Middle Manager configuration</a> so that the property will be passed to Peon (worker) processes.</li>
+</ul>
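+<p>For example, a MiddleManager runtime.properties entry passing the region to Peon processes might look like the following sketch (the other JVM flags are illustrative):</p>
+<pre><code class="hljs">druid.indexer.runner.javaOpts=-server -Xmx1g -Daws.region=us-east-1
+</code></pre>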
+<h3><a class="anchor" aria-hidden="true" id="connecting-to-s3-configuration"></a><a href="#connecting-to-s3-configuration" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 1 [...]
+<table>
+<thead>
+<tr><th>Property</th><th>Description</th><th>Default</th></tr>
+</thead>
+<tbody>
+<tr><td><code>druid.s3.accessKey</code></td><td>S3 access key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details.</td><td>Can be omitted depending on the authentication method chosen.</td></tr>
+<tr><td><code>druid.s3.secretKey</code></td><td>S3 secret key. See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details.</td><td>Can be omitted depending on the authentication method chosen.</td></tr>
+<tr><td><code>druid.s3.fileSessionCredentials</code></td><td>Path to a properties file containing <code>sessionToken</code>, <code>accessKey</code> and <code>secretKey</code> values. One key/value pair per line (format <code>key=value</code>). See <a href="#s3-authentication-methods">S3 authentication methods</a> for more details.</td><td>Can be omitted depending on the authentication method chosen.</td></tr>
+<tr><td><code>druid.s3.protocol</code></td><td>Communication protocol to use when sending requests to AWS, either <code>http</code> or <code>https</code>. This configuration is ignored if <code>druid.s3.endpoint.url</code> specifies a URL with a different protocol.</td><td><code>https</code></td></tr>
+<tr><td><code>druid.s3.disableChunkedEncoding</code></td><td>Disables chunked encoding. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#disableChunkedEncoding--">AWS document</a> for details.</td><td>false</td></tr>
+<tr><td><code>druid.s3.enablePathStyleAccess</code></td><td>Enables path style access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#enablePathStyleAccess--">AWS document</a> for details.</td><td>false</td></tr>
+<tr><td><code>druid.s3.forceGlobalBucketAccessEnabled</code></td><td>Enables global bucket access. See <a href="https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/services/s3/AmazonS3Builder.html#setForceGlobalBucketAccessEnabled-java.lang.Boolean-">AWS document</a> for details.</td><td>false</td></tr>
+<tr><td><code>druid.s3.endpoint.url</code></td><td>Service endpoint either with or without the protocol.</td><td>None</td></tr>
+<tr><td><code>druid.s3.endpoint.signingRegion</code></td><td>Region to use for SigV4 signing of requests (e.g. us-west-1).</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.host</code></td><td>Proxy host to connect through.</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.port</code></td><td>Port on the proxy host to connect through.</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.username</code></td><td>User name to use when connecting through a proxy.</td><td>None</td></tr>
+<tr><td><code>druid.s3.proxy.password</code></td><td>Password to use when connecting through a proxy.</td><td>None</td></tr>
+<tr><td><code>druid.storage.sse.type</code></td><td>Server-side encryption type. Should be one of <code>s3</code>, <code>kms</code>, or <code>custom</code>. See the <a href="#server-side-encryption">Server-side encryption section</a> below for more details.</td><td>None</td></tr>
+<tr><td><code>druid.storage.sse.kms.keyId</code></td><td>AWS KMS key ID. This is used only when <code>druid.storage.sse.type</code> is <code>kms</code> and can be empty to use the default key ID.</td><td>None</td></tr>
+<tr><td><code>druid.storage.sse.custom.base64EncodedKey</code></td><td>Base64-encoded key. Should be specified if <code>druid.storage.sse.type</code> is <code>custom</code>.</td><td>None</td></tr>
+</tbody>
+</table>
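+<p>For example, a minimal runtime.properties sketch for connecting with static credentials; all values are placeholders, and the endpoint settings are typically only needed for S3-compatible storage:</p>
+<pre><code class="hljs">druid.s3.accessKey=YOUR_ACCESS_KEY
+druid.s3.secretKey=YOUR_SECRET_KEY
+druid.s3.endpoint.url=https://s3.us-east-1.amazonaws.com
+druid.s3.endpoint.signingRegion=us-east-1
+</code></pre>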
 <h2><a class="anchor" aria-hidden="true" id="server-side-encryption"></a><a href="#server-side-encryption" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
 <p>You can enable <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/serv-side-encryption.html">server-side encryption</a> by setting
 <code>druid.storage.sse.type</code> to a supported type of server-side encryption. The current supported types are:</p>
@@ -148,11 +171,7 @@ So if you are trying to use Instance profile information, you <strong>must not</
 <li>kms: <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/UsingKMSEncryption.html">Server-side encryption with AWS KMS–Managed Keys</a></li>
 <li>custom: <a href="https://docs.aws.amazon.com/AmazonS3/latest/dev/ServerSideEncryptionCustomerKeys.html">Server-side encryption with Customer-Provided Encryption Keys</a></li>
 </ul>
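+<p>For example, a sketch of enabling encryption with KMS-managed keys (the key ID is a placeholder):</p>
+<pre><code class="hljs">druid.storage.sse.type=kms
+druid.storage.sse.kms.keyId=YOUR_KMS_KEY_ID
+</code></pre>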
-<h2><a class="anchor" aria-hidden="true" id="reading-data-from-s3"></a><a href="#reading-data-from-s3" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
-<p>The <a href="/docs/latest/ingestion/native-batch.html#s3-input-source">S3 input source</a> is supported by the <a href="/docs/latest/ingestion/native-batch.html#parallel-task">Parallel task</a>
-to read objects directly from S3. If you use the <a href="/docs/latest/ingestion/hadoop.html">Hadoop task</a>,
-you can read data from S3 by specifying the S3 paths in your <a href="/docs/latest/ingestion/hadoop.html#inputspec"><code>inputSpec</code></a>.</p>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/protobuf.html"><span class="arrow-prev">← </span><span>Protobuf</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/simple-client-sslcontext.html"><span>Simple SSLContext Provider Module</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#deep-storage">Dee [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/development/extensions-core/protobuf.html"><span class="arrow-prev">← </span><span>Protobuf</span></a><a class="docs-next button" href="/docs/latest/development/extensions-core/simple-client-sslcontext.html"><span>Simple SSLContext Provider Module</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#s3-extension">S3  [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/latest/development/modules.html b/docs/latest/development/modules.html
index 83e4219..a4870de 100644
--- a/docs/latest/development/modules.html
+++ b/docs/latest/development/modules.html
@@ -189,9 +189,9 @@ in <a href="/docs/latest/ingestion/native-batch.html">native parallel indexing</
 }
 </code></pre>
 <p>This registers the InputSource with Jackson's polymorphic serialization/deserialization layer.  More concretely, it means that if you specify an <code>&quot;inputSource&quot;: { &quot;type&quot;: &quot;s3&quot;, ... }</code> in your IO config, then the system will load this InputSource for your <code>InputSource</code> implementation.</p>
-<p>Note that inside of Druid, we have made the @JacksonInject annotation for Jackson deserialized objects actually use the base Guice injector to resolve the object to be injected.  So, if your InputSource needs access to some object, you can add a @JacksonInject annotation on a setter and it will get set on instantiation.</p>
+<p>Note that inside of Druid, we have made the <code>@JacksonInject</code> annotation for Jackson deserialized objects actually use the base Guice injector to resolve the object to be injected.  So, if your InputSource needs access to some object, you can add a <code>@JacksonInject</code> annotation on a setter and it will get set on instantiation.</p>
 <h3><a class="anchor" aria-hidden="true" id="adding-support-for-a-new-data-format"></a><a href="#adding-support-for-a-new-data-format" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 [...]
-<p>Adding support for a new data format requires to implement two interfaces, i.e., <code>InputFormat</code> and <code>InputEntityReader</code>.
+<p>Adding support for a new data format requires implementing two interfaces: <code>InputFormat</code> and <code>InputEntityReader</code>.
 <code>InputFormat</code> defines how your data is formatted. <code>InputEntityReader</code> defines how to parse your data and convert it into a Druid <code>InputRow</code>.</p>
 <p>There is an example in the <code>druid-orc-extensions</code> module with the <code>OrcInputFormat</code> and <code>OrcReader</code>.</p>
 <p>Adding an InputFormat is very similar to adding an InputSource. They operate purely through Jackson and thus should just be additions to the Jackson modules returned by your DruidModule.</p>
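+<p>For illustration only, a minimal sketch of such a registration inside a <code>DruidModule</code>; <code>MyInputFormat</code> and the type name <code>myFormat</code> are hypothetical placeholders:</p>
+<pre><code class="hljs css language-java">import com.fasterxml.jackson.databind.Module;
+import com.fasterxml.jackson.databind.jsontype.NamedType;
+import com.fasterxml.jackson.databind.module.SimpleModule;
+
+import java.util.Collections;
+import java.util.List;
+
+// Inside your DruidModule implementation:
+@Override
+public List&lt;? extends Module&gt; getJacksonModules()
+{
+  // Register MyInputFormat (hypothetical) under the type name "myFormat", so
+  // specs containing "inputFormat": { "type": "myFormat", ... } resolve to it.
+  SimpleModule module = new SimpleModule("MyInputFormatModule");
+  module.registerSubtypes(new NamedType(MyInputFormat.class, "myFormat"));
+  return Collections.singletonList(module);
+}
+</code></pre>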
diff --git a/docs/latest/ingestion/data-formats.html b/docs/latest/ingestion/data-formats.html
index 2b44580..b63345f 100644
--- a/docs/latest/ingestion/data-formats.html
+++ b/docs/latest/ingestion/data-formats.html
@@ -114,7 +114,7 @@ parsing data will not be as efficient as writing a native Java parser or using a
 <blockquote>
 <p>The Input Format, introduced in 0.17.0, is a new way to specify the format of your input data.
 Unfortunately, the Input Format doesn't yet support all data formats or ingestion methods supported by Druid.
-Especially if you want to use the Hadoop ingestion, you still need to use the <a href="#parser-deprecated">Parser</a>.
+In particular, if you want to use Hadoop ingestion, you still need to use the <a href="#parser">Parser</a>.
 If your data is formatted in some format not listed in this section, please consider using the Parser instead.</p>
 </blockquote>
 <p>All forms of Druid ingestion require some form of schema object. The format of the data to be ingested is specified using the <code>inputFormat</code> entry in your <a href="/docs/latest/ingestion/index.html#ioconfig"><code>ioConfig</code></a>.</p>
@@ -310,19 +310,17 @@ and <code>parquet</code>.</p>
 <li><a href="http://jsonpath.herokuapp.com/">http://jsonpath.herokuapp.com/</a> is useful for testing <code>path</code>-type expressions.</li>
 <li>jackson-jq supports a subset of the full <a href="https://stedolan.github.io/jq/">jq</a> syntax.  Please refer to the <a href="https://github.com/eiiches/jackson-jq">jackson-jq documentation</a> for details.</li>
 </ul>
-<h2><a class="anchor" aria-hidden="true" id="parser-deprecated"></a><a href="#parser-deprecated" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2- [...]
+<h2><a class="anchor" aria-hidden="true" id="parser"></a><a href="#parser" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.64 1-2. [...]
 <blockquote>
-<p>Even though the Parser is deprecated, it is still useful especially
-for particular ingestion methods or data formats. For example, you
-should use the Parser with Hadoop ingestion since the <a href="#input-format"><code>inputFormat</code></a>
-is not supported yet with Hadoop ingestion.
-Some data formats such as Protocol Buffer or Avro are not supported by the <code>inputFormat</code> yet as well.</p>
+<p>The Parser is deprecated for <a href="/docs/latest/ingestion/native-batch.html">native batch tasks</a>, <a href="/docs/latest/development/extensions-core/kafka-ingestion.html">Kafka indexing service</a>,
+and <a href="/docs/latest/development/extensions-core/kinesis-ingestion.html">Kinesis indexing service</a>.
+Consider using the <a href="#input-format">input format</a> instead for these types of ingestion.</p>
 </blockquote>
 <p>This section lists all default and core extension parsers.
 For community extension parsers, please see our <a href="../development/extensions.html#community-extensions">community extensions list</a>.</p>
 <h3><a class="anchor" aria-hidden="true" id="string-parser"></a><a href="#string-parser" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.8 [...]
 <p><code>string</code> typed parsers operate on text-based inputs that can be split into individual records by newlines.
-Each line can be further parsed using <a href="#parsespec-deprecated"><code>parseSpec</code></a>.</p>
+Each line can be further parsed using <a href="#parsespec"><code>parseSpec</code></a>.</p>
 <table>
 <thead>
 <tr><th>Field</th><th>Type</th><th>Description</th><th>Required</th></tr>
@@ -1027,12 +1025,11 @@ Details can be found in Schema Registry <a href="http://docs.confluent.io/curren
 </code></pre>
 <p>See the <a href="/docs/latest/development/extensions-core/protobuf.html">extension description</a> for
 more details and examples.</p>
-<h2><a class="anchor" aria-hidden="true" id="parsespec-deprecated"></a><a href="#parsespec-deprecated" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
+<h2><a class="anchor" aria-hidden="true" id="parsespec"></a><a href="#parsespec" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1.6 [...]
 <blockquote>
-<p>Even though the ParseSpec is deprecated, it is still useful especially
-for particular ingestion methods or data formats. For example, you
-should use the ParseSpec with Hadoop ingestion since the <a href="#input-format"><code>inputFormat</code></a>
-is not supported yet with Hadoop ingestion.</p>
+<p>The ParseSpec is deprecated for <a href="/docs/latest/ingestion/native-batch.html">native batch tasks</a>, <a href="/docs/latest/development/extensions-core/kafka-ingestion.html">Kafka indexing service</a>,
+and <a href="/docs/latest/development/extensions-core/kinesis-ingestion.html">Kinesis indexing service</a>.
+Consider using the <a href="#input-format">input format</a> instead for these types of ingestion.</p>
 </blockquote>
 <p>ParseSpecs serve two purposes:</p>
 <ul>
@@ -1231,7 +1228,7 @@ handle all formatting decisions on their own, without using the ParseSpec.</p>
 <tr><td>flattenSpec</td><td>JSON Object</td><td>Specifies flattening configuration for nested JSON data. See <a href="#flattenspec"><code>flattenSpec</code></a> for more info.</td><td>no</td></tr>
 </tbody>
 </table>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/ingestion/index.html"><span class="arrow-prev">← </span><span>Ingestion</span></a><a class="docs-next button" href="/docs/latest/ingestion/schema-design.html"><span>Schema design tips</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#formatting-the-data">Formatting the Data</a></li><li><a href="#custom-formats">Cu [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/ingestion/index.html"><span class="arrow-prev">← </span><span>Ingestion</span></a><a class="docs-next button" href="/docs/latest/ingestion/schema-design.html"><span>Schema design tips</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#formatting-the-data">Formatting the Data</a></li><li><a href="#custom-formats">Cu [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/latest/ingestion/hadoop.html b/docs/latest/ingestion/hadoop.html
index 5d07937..b93e3e4 100644
--- a/docs/latest/ingestion/hadoop.html
+++ b/docs/latest/ingestion/hadoop.html
@@ -194,11 +194,38 @@ what it should contain.</p>
 <p>For example, using the static input paths:</p>
 <pre><code class="hljs"><span class="hljs-string">"paths"</span> : "<span class="hljs-type">hdfs</span>://path/to/data/<span class="hljs-keyword">is</span>/here/data.gz,hdfs://path/to/data/<span class="hljs-keyword">is</span>/here/moredata.gz,hdfs://path/to/data/<span class="hljs-keyword">is</span>/here/evenmoredata.gz<span class="hljs-string">"
 </span></code></pre>
-<p>You can also read from cloud storage such as AWS S3 or Google Cloud Storage.</p>
+<p>You can also read from cloud storage such as AWS S3 or Google Cloud Storage.
+To do so, you need to install the necessary library on the classpath of <em>all MiddleManager or Indexer processes</em>.
+For S3, you can run the command below to install the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a>.</p>
+<pre><code class="hljs css language-bash">java -classpath <span class="hljs-string">"<span class="hljs-variable">${DRUID_HOME}</span>lib/*"</span> org.apache.druid.cli.Main tools pull-deps -h <span class="hljs-string">"org.apache.hadoop:hadoop-aws:<span class="hljs-variable">${HADOOP_VERSION}</span>"</span>;
+cp <span class="hljs-variable">${DRUID_HOME}</span>/hadoop-dependencies/hadoop-aws/<span class="hljs-variable">${HADOOP_VERSION}</span>/hadoop-aws-<span class="hljs-variable">${HADOOP_VERSION}</span>.jar <span class="hljs-variable">${DRUID_HOME}</span>/extensions/druid-hdfs-storage/
+</code></pre>
+<p>Once you install the Hadoop AWS module in all MiddleManager and Indexer processes, you can put
+your S3 paths in the inputSpec with the job properties shown below.
+For more configuration options, see the <a href="https://hadoop.apache.org/docs/current/hadoop-aws/tools/hadoop-aws/index.html">Hadoop AWS module</a>.</p>
 <pre><code class="hljs"><span class="hljs-string">"paths"</span> : "<span class="hljs-type">s3a</span>://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/data.gz,s3a://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/moredata.gz,s3a://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/evenmoredata.gz<span class="hljs-string">"
 </span></code></pre>
+<pre><code class="hljs css language-json">"jobProperties" : {
+  "fs.s3a.impl" : "org.apache.hadoop.fs.s3a.S3AFileSystem",
+  "fs.AbstractFileSystem.s3a.impl" : "org.apache.hadoop.fs.s3a.S3A",
+  "fs.s3a.access.key" : "YOUR_ACCESS_KEY",
+  "fs.s3a.secret.key" : "YOUR_SECRET_KEY"
+}
+</code></pre>
+<p>For Google Cloud Storage, you need to install the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md">GCS connector jar</a>
+under <code>${DRUID_HOME}/hadoop-dependencies</code> in <em>all MiddleManager or Indexer processes</em>.
+Once you install the GCS connector jar in all MiddleManager and Indexer processes, you can put
+your Google Cloud Storage paths in the inputSpec with the job properties shown below.
+For more configuration options, see the <a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/INSTALL.md#configure-hadoop">instructions to configure Hadoop</a>,
+<a href="https://github.com/GoogleCloudPlatform/bigdata-interop/blob/master/gcs/conf/gcs-core-default.xml">GCS core default</a>
+and <a href="https://github.com/GoogleCloudPlatform/bdutil/blob/master/conf/hadoop2/gcs-core-template.xml">GCS core template</a>.</p>
 <pre><code class="hljs"><span class="hljs-string">"paths"</span> : "<span class="hljs-type">gs</span>://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/data.gz,gs://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/moredata.gz,gs://billy-bucket/the/data/<span class="hljs-keyword">is</span>/here/evenmoredata.gz<span class="hljs-string">"
 </span></code></pre>
+<pre><code class="hljs css language-json">"jobProperties" : {
+  "fs.gs.impl" : "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFileSystem",
+  "fs.AbstractFileSystem.gs.impl" : "com.google.cloud.hadoop.fs.gcs.GoogleHadoopFS"
+}
+</code></pre>
 <h4><a class="anchor" aria-hidden="true" id="granularity"></a><a href="#granularity" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42 [...]
 <p>A type of inputSpec that expects data to be organized in directories according to datetime using the path format: <code>y=XXXX/m=XX/d=XX/H=XX/M=XX/S=XX</code> (where date is represented by lowercase and time is represented by uppercase).</p>
 <table>
diff --git a/docs/latest/ingestion/index.html b/docs/latest/ingestion/index.html
index cab763d..6ea3dcd 100644
--- a/docs/latest/ingestion/index.html
+++ b/docs/latest/ingestion/index.html
@@ -306,9 +306,9 @@ documentation for each <a href="#ingestion-methods">ingestion method</a>.</li>
 <li><a href="#tuningconfig"><code>tuningConfig</code></a>, which controls various tuning parameters specific to each
 <a href="#ingestion-methods">ingestion method</a>.</li>
 </ul>
-<p>Example ingestion spec for task type <code>parallel_index</code> (native batch):</p>
+<p>Example ingestion spec for task type <code>index_parallel</code> (native batch):</p>
 <pre><code class="hljs">{
-  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"parallel_index"</span>,
+  <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span>: {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"wikipedia"</span>,
@@ -337,7 +337,7 @@ documentation for each <a href="#ingestion-methods">ingestion method</a>.</li>
       }
     },
     <span class="hljs-attr">"ioConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"parallel_index"</span>,
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"inputSource"</span>: {
         <span class="hljs-attr">"type"</span>: <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span>: <span class="hljs-string">"examples/indexing/"</span>,
@@ -354,7 +354,7 @@ documentation for each <a href="#ingestion-methods">ingestion method</a>.</li>
       }
     },
     <span class="hljs-attr">"tuningConfig"</span>: {
-      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"parallel_index"</span>
+      <span class="hljs-attr">"type"</span>: <span class="hljs-string">"index_parallel"</span>
     }
   }
 }
diff --git a/docs/latest/ingestion/native-batch.html b/docs/latest/ingestion/native-batch.html
index e313bd3..42e4760 100644
--- a/docs/latest/ingestion/native-batch.html
+++ b/docs/latest/ingestion/native-batch.html
@@ -358,6 +358,10 @@ falling in the same time chunk and the same hash value from multiple MiddleManag
 them to create the final segments. Finally, they push the final segments to the deep storage at once.</li>
 </ul>
 <h4><a class="anchor" aria-hidden="true" id="single-dimension-range-partitioning"></a><a href="#single-dimension-range-partitioning" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2 [...]
+<blockquote>
+<p>Single dimension range partitioning is currently not supported in the sequential mode of the Parallel task.
+Try setting <code>maxNumConcurrentSubTasks</code> to a value larger than 1 to use this partitioning; see the example below.</p>
+</blockquote>
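+<p>For example, a hedged sketch of a <code>tuningConfig</code> using this partitioning in parallel mode; the dimension name and row target are illustrative placeholders:</p>
+<pre><code class="hljs css language-json">"tuningConfig" : {
+  "type" : "index_parallel",
+  "partitionsSpec" : {
+    "type" : "single_dim",
+    "partitionDimension" : "page",
+    "targetRowsPerSegment" : 5000000
+  },
+  "maxNumConcurrentSubTasks" : 2
+}
+</code></pre>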
 <table>
 <thead>
 <tr><th>property</th><th>description</th><th>default</th><th>required?</th></tr>
@@ -1037,6 +1041,9 @@ where each worker task of <code>index_parallel</code> will read a single file.</
 <tr><td>paths</td><td>HDFS paths. Can be either a JSON array or comma-separated string of paths. Wildcards like <code>*</code> are supported in these paths.</td><td>None</td><td>yes</td></tr>
 </tbody>
 </table>
+<p>You can also ingest from cloud storage using the HDFS input source.
+However, if you want to read from AWS S3 or Google Cloud Storage, consider using
+the <a href="#s3-input-source">S3 input source</a> or the <a href="#google-cloud-storage-input-source">Google Cloud Storage input source</a> instead.</p>
 <h3><a class="anchor" aria-hidden="true" id="http-input-source"></a><a href="#http-input-source" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2- [...]
 <p>The HTTP input source supports reading files directly
 from remote sites via HTTP.
diff --git a/docs/latest/operations/other-hadoop.html b/docs/latest/operations/other-hadoop.html
index 888b0aa..fed3e40 100644
--- a/docs/latest/operations/other-hadoop.html
+++ b/docs/latest/operations/other-hadoop.html
@@ -101,7 +101,7 @@ there is a conflict.</p>
 <p>These properties can be set in either of the following ways:</p>
 <ul>
 <li>Using the task definition, e.g. add <code>&quot;mapreduce.job.classloader&quot;: &quot;true&quot;</code> to the <code>jobProperties</code> of the <code>tuningConfig</code> of your indexing task (see the <a href="/docs/latest/ingestion/hadoop.html">Hadoop batch ingestion documentation</a>).</li>
-<li>Using system properties, e.g. on the MiddleManager set <code>druid.indexer.runner.javaOpts=... -Dhadoop.mapreduce.job.classloader=true</code>.</li>
+<li>Using system properties, e.g. set <code>druid.indexer.runner.javaOpts=... -Dhadoop.mapreduce.job.classloader=true</code> in the <a href="/docs/latest/configuration/index.html#middlemanager-configuration">Middle Manager configuration</a>.</li>
 </ul>
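+<p>For example, a hedged sketch of the first approach in an indexing task's <code>tuningConfig</code>:</p>
+<pre><code class="hljs css language-json">"tuningConfig" : {
+  "type" : "hadoop",
+  "jobProperties" : {
+    "mapreduce.job.classloader" : "true"
+  }
+}
+</code></pre>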
 <h3><a class="anchor" aria-hidden="true" id="overriding-specific-classes"></a><a href="#overriding-specific-classes" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 1 [...]
 <p>When <code>mapreduce.job.classloader = true</code>, it is also possible to specifically define which classes should be loaded from the hadoop system classpath and which should be loaded from job-supplied JARs.</p>
diff --git a/docs/latest/querying/aggregations.html b/docs/latest/querying/aggregations.html
index 9b56ff6..62a51f5 100644
--- a/docs/latest/querying/aggregations.html
+++ b/docs/latest/querying/aggregations.html
@@ -136,7 +136,7 @@ query time.</p>
 <p>(Double/Float/Long) First and Last aggregators cannot be used in an ingestion spec, and should only be specified as part of queries.</p>
 <p>Note that queries with first/last aggregators on a segment created with rollup enabled will return the rolled-up value, and not the last value within the raw ingested data.</p>
 <h4><a class="anchor" aria-hidden="true" id="doublefirst-aggregator"></a><a href="#doublefirst-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0 [...]
-<p><code>doubleFirst</code> computes the metric value with the minimum timestamp or 0 if no row exist</p>
+<p><code>doubleFirst</code> computes the metric value with the minimum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"doubleFirst"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -144,7 +144,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="doublelast-aggregator"></a><a href="#doublelast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
-<p><code>doubleLast</code> computes the metric value with the maximum timestamp or 0 if no row exist</p>
+<p><code>doubleLast</code> computes the metric value with the maximum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"doubleLast"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -152,7 +152,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="floatfirst-aggregator"></a><a href="#floatfirst-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
-<p><code>floatFirst</code> computes the metric value with the minimum timestamp or 0 if no row exist</p>
+<p><code>floatFirst</code> computes the metric value with the minimum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"floatFirst"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -160,7 +160,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="floatlast-aggregator"></a><a href="#floatlast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
-<p><code>floatLast</code> computes the metric value with the maximum timestamp or 0 if no row exist</p>
+<p><code>floatLast</code> computes the metric value with the maximum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"floatLast"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -168,7 +168,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="longfirst-aggregator"></a><a href="#longfirst-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1 [...]
-<p><code>longFirst</code> computes the metric value with the minimum timestamp or 0 if no row exist</p>
+<p><code>longFirst</code> computes the metric value with the minimum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"longFirst"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -176,7 +176,7 @@ query time.</p>
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="longlast-aggregator"></a><a href="#longlast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.2 [...]
-<p><code>longLast</code> computes the metric value with the maximum timestamp or 0 if no row exist</p>
+<p><code>longLast</code> computes the metric value with the maximum timestamp, or 0 in default mode (<code>null</code> in SQL-compatible mode) if no rows exist.</p>
 <pre><code class="hljs css language-json">{
   <span class="hljs-attr">"type"</span> : <span class="hljs-string">"longLast"</span>,
   <span class="hljs-attr">"name"</span> : &lt;output_name&gt;,
@@ -189,8 +189,7 @@ query time.</p>
   "type" : "stringFirst",
   "name" : &lt;output_name&gt;,
   "fieldName" : &lt;metric_name&gt;,
-  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024),
-  "filterNullValues" : &lt;boolean&gt; # (optional, defaults to false)
+  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024)
 }
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="stringlast-aggregator"></a><a href="#stringlast-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
@@ -199,8 +198,7 @@ query time.</p>
   "type" : "stringLast",
   "name" : &lt;output_name&gt;,
   "fieldName" : &lt;metric_name&gt;,
-  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024),
-  "filterNullValues" : &lt;boolean&gt; # (optional, defaults to false)
+  "maxStringBytes" : &lt;integer&gt; # (optional, defaults to 1024)
 }
 </code></pre>
 <h3><a class="anchor" aria-hidden="true" id="javascript-aggregator"></a><a href="#javascript-aggregator" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2 [...]
diff --git a/docs/latest/querying/sql.html b/docs/latest/querying/sql.html
index 186fd5e..1eb2703 100644
--- a/docs/latest/querying/sql.html
+++ b/docs/latest/querying/sql.html
@@ -231,9 +231,9 @@ possible for two aggregators in the same SQL query to have different filters.</p
 <tr><td><code>STDDEV_POP(expr)</code></td><td>Computes standard deviation population of <code>expr</code>. See <a href="../development/extensions-core/stats.html">stats extension</a> documentation for additional details.</td></tr>
 <tr><td><code>STDDEV_SAMP(expr)</code></td><td>Computes standard deviation sample of <code>expr</code>. See <a href="../development/extensions-core/stats.html">stats extension</a> documentation for additional details.</td></tr>
 <tr><td><code>STDDEV(expr)</code></td><td>Computes standard deviation sample of <code>expr</code>. See <a href="../development/extensions-core/stats.html">stats extension</a> documentation for additional details.</td></tr>
-<tr><td><code>EARLIEST(expr)</code></td><td>Returns the earliest non-null value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;earliest&quot; is the value first encountered with the minimum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the first value encountered.</td></tr>
+<tr><td><code>EARLIEST(expr)</code></td><td>Returns the earliest value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;earliest&quot; is the value first encountered with the minimum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the first value encountered.</td></tr>
 <tr><td><code>EARLIEST(expr, maxBytesPerString)</code></td><td>Like <code>EARLIEST(expr)</code>, but for strings. The <code>maxBytesPerString</code> parameter determines how much aggregation space to allocate per string. Strings longer than this limit will be truncated. This parameter should be set as low as possible, since high values will lead to wasted memory.</td></tr>
-<tr><td><code>LATEST(expr)</code></td><td>Returns the latest non-null value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;latest&quot; is the value last encountered with the maximum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the last value encountered.</td></tr>
+<tr><td><code>LATEST(expr)</code></td><td>Returns the latest value of <code>expr</code>, which must be numeric. If <code>expr</code> comes from a relation with a timestamp column (like a Druid datasource) then &quot;latest&quot; is the value last encountered with the maximum overall timestamp of all values being aggregated. If <code>expr</code> does not come from a relation with a timestamp, then it is simply the last value encountered.</td></tr>
 <tr><td><code>LATEST(expr, maxBytesPerString)</code></td><td>Like <code>LATEST(expr)</code>, but for strings. The <code>maxBytesPerString</code> parameter determines how much aggregation space to allocate per string. Strings longer than this limit will be truncated. This parameter should be set as low as possible, since high values will lead to wasted memory.</td></tr>
 </tbody>
 </table>
diff --git a/docs/latest/tutorials/tutorial-batch.html b/docs/latest/tutorials/tutorial-batch.html
index a6f792d..a8cf205 100644
--- a/docs/latest/tutorials/tutorial-batch.html
+++ b/docs/latest/tutorials/tutorial-batch.html
@@ -142,42 +142,36 @@ At this point, you can go to the <code>Query</code> view to run SQL queries agai
 <p>The Druid package includes the following sample native batch ingestion task spec at <code>quickstart/tutorial/wikipedia-index.json</code>, shown here for convenience,
 which has been configured to read the <code>quickstart/tutorial/wikiticker-2015-09-12-sampled.json.gz</code> input file:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"wikipedia"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span> : [
-              <span class="hljs-string">"channel"</span>,
-              <span class="hljs-string">"cityName"</span>,
-              <span class="hljs-string">"comment"</span>,
-              <span class="hljs-string">"countryIsoCode"</span>,
-              <span class="hljs-string">"countryName"</span>,
-              <span class="hljs-string">"isAnonymous"</span>,
-              <span class="hljs-string">"isMinor"</span>,
-              <span class="hljs-string">"isNew"</span>,
-              <span class="hljs-string">"isRobot"</span>,
-              <span class="hljs-string">"isUnpatrolled"</span>,
-              <span class="hljs-string">"metroCode"</span>,
-              <span class="hljs-string">"namespace"</span>,
-              <span class="hljs-string">"page"</span>,
-              <span class="hljs-string">"regionIsoCode"</span>,
-              <span class="hljs-string">"regionName"</span>,
-              <span class="hljs-string">"user"</span>,
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
-            ]
-          },
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
-          }
-        }
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span> : [
+          <span class="hljs-string">"channel"</span>,
+          <span class="hljs-string">"cityName"</span>,
+          <span class="hljs-string">"comment"</span>,
+          <span class="hljs-string">"countryIsoCode"</span>,
+          <span class="hljs-string">"countryName"</span>,
+          <span class="hljs-string">"isAnonymous"</span>,
+          <span class="hljs-string">"isMinor"</span>,
+          <span class="hljs-string">"isNew"</span>,
+          <span class="hljs-string">"isRobot"</span>,
+          <span class="hljs-string">"isUnpatrolled"</span>,
+          <span class="hljs-string">"metroCode"</span>,
+          <span class="hljs-string">"namespace"</span>,
+          <span class="hljs-string">"page"</span>,
+          <span class="hljs-string">"regionIsoCode"</span>,
+          <span class="hljs-string">"regionName"</span>,
+          <span class="hljs-string">"user"</span>,
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
+        ]
+      },
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
       },
       <span class="hljs-attr">"metricsSpec"</span> : [],
       <span class="hljs-attr">"granularitySpec"</span> : {
@@ -189,16 +183,19 @@ which has been configured to read the <code>quickstart/tutorial/wikiticker-2015-
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/tutorial/"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"wikiticker-2015-09-12-sampled.json.gz"</span>
       },
+      <span class="hljs-attr">"inputFormat"</span> :  {
+        <span class="hljs-attr">"type"</span>: <span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"appendToExisting"</span> : <span class="hljs-literal">false</span>
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
       <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
     }
diff --git a/docs/latest/tutorials/tutorial-compaction.html b/docs/latest/tutorials/tutorial-compaction.html
index 2326a6e..db7aab3 100644
--- a/docs/latest/tutorials/tutorial-compaction.html
+++ b/docs/latest/tutorials/tutorial-compaction.html
@@ -114,7 +114,7 @@ Retrieved 1 row <span class="hljs-keyword">in</span> 1.38s.
   <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"compaction-tutorial"</span>,
   <span class="hljs-attr">"interval"</span>: <span class="hljs-string">"2015-09-12/2015-09-13"</span>,
   <span class="hljs-attr">"tuningConfig"</span> : {
-    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
     <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
     <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
   }
@@ -153,7 +153,7 @@ Retrieved 1 row <span class="hljs-keyword">in</span> 1.30s.
   <span class="hljs-attr">"interval"</span>: <span class="hljs-string">"2015-09-12/2015-09-13"</span>,
   <span class="hljs-attr">"segmentGranularity"</span>: <span class="hljs-string">"DAY"</span>,
   <span class="hljs-attr">"tuningConfig"</span> : {
-    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+    <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
     <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
     <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>,
     <span class="hljs-attr">"forceExtendableShardSpecs"</span> : <span class="hljs-literal">true</span>
diff --git a/docs/latest/tutorials/tutorial-ingestion-spec.html b/docs/latest/tutorials/tutorial-ingestion-spec.html
index 95880fc..69257d0 100644
--- a/docs/latest/tutorials/tutorial-ingestion-spec.html
+++ b/docs/latest/tutorials/tutorial-ingestion-spec.html
@@ -120,33 +120,14 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 }
 </code></pre>
 <p>Let's call the tutorial datasource <code>ingestion-tutorial</code>.</p>
-<h3><a class="anchor" aria-hidden="true" id="choose-a-parser"></a><a href="#choose-a-parser" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5  [...]
-<p>A <code>dataSchema</code> has a <code>parser</code> field, which defines the parser that Druid will use to interpret the input data.</p>
-<p>Since our input data is represented as JSON strings, we'll use a <code>string</code> parser with <code>json</code> format:</p>
-<pre><code class="hljs css language-json">"dataSchema" : {
-  "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json"
-    }
-  }
-}
-</code></pre>
 <h3><a class="anchor" aria-hidden="true" id="time-column"></a><a href="#time-column" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42 [...]
-<p>The <code>parser</code> needs to know how to extract the main timestamp field from the input data. When using a <code>json</code> type <code>parseSpec</code>, the timestamp is defined in a <code>timestampSpec</code>.</p>
-<p>The timestamp column in our input data is named &quot;ts&quot;, containing ISO 8601 timestamps, so let's add a <code>timestampSpec</code> with that information to the <code>parseSpec</code>:</p>
+<p>The <code>dataSchema</code> needs to know how to extract the main timestamp field from the input data.</p>
+<p>The timestamp column in our input data is named &quot;ts&quot;, containing ISO 8601 timestamps, so let's add a <code>timestampSpec</code> with that information to the <code>dataSchema</code>:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
   }
 }
 </code></pre>
@@ -161,24 +142,16 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <li><p>If rollup is disabled, then all columns are treated as &quot;dimensions&quot; and no pre-aggregation occurs.</p></li>
 </ul>
 <p>For this tutorial, let's enable rollup. This is specified with a <code>granularitySpec</code> on the <code>dataSchema</code>.</p>
-<p>Note that the <code>granularitySpec</code> lies outside of the <code>parser</code>. We will revisit the <code>parser</code> soon when we define our dimensions and metrics.</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
   },
   "granularitySpec" : {
     "rollup" : true
   }
 }
-
 </code></pre>
 <h4><a class="anchor" aria-hidden="true" id="choosing-dimensions-and-metrics"></a><a href="#choosing-dimensions-and-metrics" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 [...]
 <p>For this example dataset, the following is a sensible split for &quot;dimensions&quot; and &quot;metrics&quot;:</p>
@@ -189,27 +162,21 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>The dimensions here are a group of properties that identify a unidirectional flow of IP traffic, while the metrics represent facts about the IP traffic flow specified by a dimension grouping.</p>
 <p>Let's look at how to define these dimensions and metrics within the ingestion spec.</p>
 <h4><a class="anchor" aria-hidden="true" id="dimensions"></a><a href="#dimensions" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1 [...]
-<p>Dimensions are specified with a <code>dimensionsSpec</code> inside the <code>parseSpec</code>.</p>
+<p>Dimensions are specified with a <code>dimensionsSpec</code> inside the <code>dataSchema</code>.</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "granularitySpec" : {
     "rollup" : true
@@ -230,24 +197,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>Metrics are specified with a <code>metricsSpec</code> inside the <code>dataSchema</code>:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -291,24 +252,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>Segment granularity is configured by the <code>segmentGranularity</code> property in the <code>granularitySpec</code>. For this tutorial, we'll create hourly segments:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -328,24 +283,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>The query granularity is configured by the <code>queryGranularity</code> property in the <code>granularitySpec</code>. For this tutorial, let's use minute granularity:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -356,7 +305,7 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
   "granularitySpec" : {
     "type" : "uniform",
     "segmentGranularity" : "HOUR",
-    "queryGranularity" : "MINUTE"
+    "queryGranularity" : "MINUTE",
     "rollup" : true
   }
 }
@@ -372,24 +321,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>The interval is also specified in the <code>granularitySpec</code>:</p>
 <pre><code class="hljs css language-json">"dataSchema" : {
   "dataSource" : "ingestion-tutorial",
-  "parser" : {
-    "type" : "string",
-    "parseSpec" : {
-      "format" : "json",
-      "timestampSpec" : {
-        "format" : "iso",
-        "column" : "ts"
-      },
-      "dimensionsSpec" : {
-        "dimensions": [
-          "srcIP",
-          { "name" : "srcPort", "type" : "long" },
-          { "name" : "dstIP", "type" : "string" },
-          { "name" : "dstPort", "type" : "long" },
-          { "name" : "protocol", "type" : "string" }
-        ]
-      }
-    }
+  "timestampSpec" : {
+    "format" : "iso",
+    "column" : "ts"
+  },
+  "dimensionsSpec" : {
+    "dimensions": [
+      "srcIP",
+      { "name" : "srcPort", "type" : "long" },
+      { "name" : "dstIP", "type" : "string" },
+      { "name" : "dstPort", "type" : "long" },
+      { "name" : "protocol", "type" : "string" }
+    ]
   },
   "metricsSpec" : [
     { "type" : "count", "name" : "count" },
@@ -410,28 +353,22 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>We've now finished defining our <code>dataSchema</code>. The remaining steps are to place the <code>dataSchema</code> we created into an ingestion task spec, and specify the input source.</p>
 <p>The <code>dataSchema</code> is shared across all task types, but each task type has its own specification format. For this tutorial, we will use the native batch ingestion task:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"ingestion-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span> : {
-            <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
-            <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"srcIP"</span>,
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span> : {
+        <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
+        <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"srcIP"</span>,
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -451,39 +388,47 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 }
 </code></pre>
 <h2><a class="anchor" aria-hidden="true" id="define-the-input-source"></a><a href="#define-the-input-source" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 [...]
-<p>Now let's define our input source, which is specified in an <code>ioConfig</code> object. Each task type has its own type of <code>ioConfig</code>. The native batch task uses &quot;firehoses&quot; to read input data, so let's configure a &quot;local&quot; firehose to read the example netflow data we saved earlier:</p>
+<p>Now let's define our input source, which is specified in an <code>ioConfig</code> object. Each task type has its own type of <code>ioConfig</code>. To read input data, we need to specify an <code>inputSource</code>. Since the example netflow data we saved earlier lives in a local file, we'll configure a <code>local</code> input source:</p>
 <pre><code class="hljs css language-json">    "ioConfig" : {
-      "type" : "index",
-      "firehose" : {
+      "type" : "index_parallel",
+      "inputSource" : {
         "type" : "local",
         "baseDir" : "quickstart/",
         "filter" : "ingestion-tutorial-data.json"
       }
     }
 </code></pre>
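+<p>Other input source types follow the same shape. As an illustrative aside (the URL is a placeholder; this is not used in the tutorial), reading the same file over HTTP should look like this:</p>
+<pre><code class="hljs css language-json">"inputSource" : {
+  "type" : "http",
+  "uris" : ["http://example.com/ingestion-tutorial-data.json"]
+}
+</code></pre>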
+<h3><a class="anchor" aria-hidden="true" id="define-the-format-of-the-data"></a><a href="#define-the-format-of-the-data" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12  [...]
+<p>Since our input data is represented as JSON strings, we'll use an <code>inputFormat</code> of type <code>json</code>:</p>
+<pre><code class="hljs css language-json">    "ioConfig" : {
+      "type" : "index_parallel",
+      "inputSource" : {
+        "type" : "local",
+        "baseDir" : "quickstart/",
+        "filter" : "ingestion-tutorial-data.json"
+      },
+      "inputFormat" : {
+        "type" : "json"
+      }
+    }
+</code></pre>
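+<p>With the <code>inputFormat</code> added to the <code>ioConfig</code>, the ingestion spec so far looks like this:</p>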
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"ingestion-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span> : {
-            <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
-            <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"srcIP"</span>,
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span> : {
+        <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
+        <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"srcIP"</span>,
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -500,11 +445,14 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"ingestion-tutorial-data.json"</span>
+      },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"json"</span>
       }
     }
   }
@@ -514,7 +462,7 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>Each ingestion task has a <code>tuningConfig</code> section that allows users to tune various ingestion parameters.</p>
 <p>As an example, let's add a <code>tuningConfig</code> that sets a target segment size for the native batch ingestion task:</p>
 <pre><code class="hljs css language-json">    "tuningConfig" : {
-      "type" : "index",
+      "type" : "index_parallel",
       "maxRowsPerSegment" : 5000000
     }
 </code></pre>
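+<p>Since this tutorial uses the parallel task type, the <code>tuningConfig</code> is also where ingestion parallelism is controlled. As a minimal illustrative sketch (not required for this tutorial), allowing two subtasks to run concurrently should look like this:</p>
+<pre><code class="hljs css language-json">    "tuningConfig" : {
+      "type" : "index_parallel",
+      "maxRowsPerSegment" : 5000000,
+      "maxNumConcurrentSubTasks" : 2
+    }
+</code></pre>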
@@ -522,28 +470,22 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <h2><a class="anchor" aria-hidden="true" id="final-spec"></a><a href="#final-spec" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 12 13 12H9c-.98 0-2-1.22-2-2.5 0-.83.42-1 [...]
<p>We've finished defining the ingestion spec; it should now look like the following:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"ingestion-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span> : {
-            <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
-            <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"srcIP"</span>,
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span> : {
+        <span class="hljs-attr">"format"</span> : <span class="hljs-string">"iso"</span>,
+        <span class="hljs-attr">"column"</span> : <span class="hljs-string">"ts"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"srcIP"</span>,
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"srcPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstIP"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"dstPort"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span> : <span class="hljs-string">"protocol"</span>, <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -560,15 +502,18 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"ingestion-tutorial-data.json"</span>
+      },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"json"</span>
       }
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>
     }
   }
@@ -598,7 +543,7 @@ Retrieved 5 rows <span class="hljs-keyword">in</span> 0.12s.
 
 dsql&gt;
 </code></pre>
-</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/tutorials/tutorial-delete-data.html"><span class="arrow-prev">← </span><span>Deleting data</span></a><a class="docs-next button" href="/docs/latest/tutorials/tutorial-transform-spec.html"><span>Transforming input data</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#example-data">Example data</a></li><li><a href= [...]
+</span></div></article></div><div class="docs-prevnext"><a class="docs-prev button" href="/docs/latest/tutorials/tutorial-delete-data.html"><span class="arrow-prev">← </span><span>Deleting data</span></a><a class="docs-next button" href="/docs/latest/tutorials/tutorial-transform-spec.html"><span>Transforming input data</span><span class="arrow-next"> →</span></a></div></div></div><nav class="onPageNav"><ul class="toc-headings"><li><a href="#example-data">Example data</a></li><li><a href= [...]
                 document.addEventListener('keyup', function(e) {
                   if (e.target !== document.body) {
                     return;
diff --git a/docs/latest/tutorials/tutorial-kafka.html b/docs/latest/tutorials/tutorial-kafka.html
index 66aeaef..cbd636b 100644
--- a/docs/latest/tutorials/tutorial-kafka.html
+++ b/docs/latest/tutorials/tutorial-kafka.html
@@ -168,38 +168,32 @@ Similarly, you can also edit the spec directly and see it reflected in the previ
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span>: {
       <span class="hljs-attr">"dataSource"</span>: <span class="hljs-string">"wikipedia"</span>,
-      <span class="hljs-attr">"parser"</span>: {
-        <span class="hljs-attr">"type"</span>: <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span>: {
-          <span class="hljs-attr">"format"</span>: <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"auto"</span>
-          },
-          <span class="hljs-attr">"dimensionsSpec"</span>: {
-            <span class="hljs-attr">"dimensions"</span>: [
-              <span class="hljs-string">"channel"</span>,
-              <span class="hljs-string">"cityName"</span>,
-              <span class="hljs-string">"comment"</span>,
-              <span class="hljs-string">"countryIsoCode"</span>,
-              <span class="hljs-string">"countryName"</span>,
-              <span class="hljs-string">"isAnonymous"</span>,
-              <span class="hljs-string">"isMinor"</span>,
-              <span class="hljs-string">"isNew"</span>,
-              <span class="hljs-string">"isRobot"</span>,
-              <span class="hljs-string">"isUnpatrolled"</span>,
-              <span class="hljs-string">"metroCode"</span>,
-              <span class="hljs-string">"namespace"</span>,
-              <span class="hljs-string">"page"</span>,
-              <span class="hljs-string">"regionIsoCode"</span>,
-              <span class="hljs-string">"regionName"</span>,
-              <span class="hljs-string">"user"</span>,
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
-            ]
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"time"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"auto"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span>: {
+        <span class="hljs-attr">"dimensions"</span>: [
+          <span class="hljs-string">"channel"</span>,
+          <span class="hljs-string">"cityName"</span>,
+          <span class="hljs-string">"comment"</span>,
+          <span class="hljs-string">"countryIsoCode"</span>,
+          <span class="hljs-string">"countryName"</span>,
+          <span class="hljs-string">"isAnonymous"</span>,
+          <span class="hljs-string">"isMinor"</span>,
+          <span class="hljs-string">"isNew"</span>,
+          <span class="hljs-string">"isRobot"</span>,
+          <span class="hljs-string">"isUnpatrolled"</span>,
+          <span class="hljs-string">"metroCode"</span>,
+          <span class="hljs-string">"namespace"</span>,
+          <span class="hljs-string">"page"</span>,
+          <span class="hljs-string">"regionIsoCode"</span>,
+          <span class="hljs-string">"regionName"</span>,
+          <span class="hljs-string">"user"</span>,
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"added"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"deleted"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> },
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"delta"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [],
       <span class="hljs-attr">"granularitySpec"</span>: {
@@ -215,6 +209,9 @@ Similarly, you can also edit the spec directly and see it reflected in the previ
     },
     <span class="hljs-attr">"ioConfig"</span>: {
       <span class="hljs-attr">"topic"</span>: <span class="hljs-string">"wikipedia"</span>,
+      <span class="hljs-attr">"inputFormat"</span>: {
+        <span class="hljs-attr">"type"</span>: <span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"replicas"</span>: <span class="hljs-number">2</span>,
       <span class="hljs-attr">"taskDuration"</span>: <span class="hljs-string">"PT10M"</span>,
       <span class="hljs-attr">"completionTimeout"</span>: <span class="hljs-string">"PT20M"</span>,
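+<p>A minimal sketch of the complete Kafka supervisor <code>ioConfig</code> (assuming a broker at <code>localhost:9092</code>; field values here are illustrative) looks like this:</p>
+<pre><code class="hljs css language-json">"ioConfig": {
+  "topic": "wikipedia",
+  "inputFormat": { "type": "json" },
+  "consumerProperties": { "bootstrap.servers": "localhost:9092" },
+  "replicas": 2,
+  "taskDuration": "PT10M",
+  "completionTimeout": "PT20M"
+}
+</code></pre>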
diff --git a/docs/latest/tutorials/tutorial-rollup.html b/docs/latest/tutorials/tutorial-rollup.html
index fda119d..8cd84c8 100644
--- a/docs/latest/tutorials/tutorial-rollup.html
+++ b/docs/latest/tutorials/tutorial-rollup.html
@@ -97,25 +97,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <p>A file containing this sample input data is located at <code>quickstart/tutorial/rollup-data.json</code>.</p>
 <p>We'll ingest this data using the following ingestion task spec, located at <code>quickstart/tutorial/rollup-index.json</code>.</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"rollup-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span> : [
-              <span class="hljs-string">"srcIP"</span>,
-              <span class="hljs-string">"dstIP"</span>
-            ]
-          },
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
-          }
-        }
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span> : [
+          <span class="hljs-string">"srcIP"</span>,
+          <span class="hljs-string">"dstIP"</span>
+        ]
+      },
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -131,16 +125,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/tutorial"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"rollup-data.json"</span>
       },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"appendToExisting"</span> : <span class="hljs-literal">false</span>
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
       <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
     }
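+<p>As a quick illustration of the rollup this spec produces (hypothetical rows; minute query granularity assumed), two input rows sharing a truncated timestamp and identical dimension values combine into one stored row with a row count and summed metrics:</p>
+<pre><code class="hljs css language-json">{"timestamp":"2018-01-01T01:01:35Z","srcIP":"1.1.1.1","dstIP":"2.2.2.2","packets":20,"bytes":9024}
+{"timestamp":"2018-01-01T01:01:51Z","srcIP":"1.1.1.1","dstIP":"2.2.2.2","packets":255,"bytes":21133}
+</code></pre>
+<p>would be stored as:</p>
+<pre><code class="hljs css language-json">{"__time":"2018-01-01T01:01:00.000Z","srcIP":"1.1.1.1","dstIP":"2.2.2.2","count":2,"packets":275,"bytes":30157}
+</code></pre>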
diff --git a/docs/latest/tutorials/tutorial-transform-spec.html b/docs/latest/tutorials/tutorial-transform-spec.html
index 66267ff..e45d044 100644
--- a/docs/latest/tutorials/tutorial-transform-spec.html
+++ b/docs/latest/tutorials/tutorial-transform-spec.html
@@ -91,25 +91,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
 <h2><a class="anchor" aria-hidden="true" id="load-data-with-transform-specs"></a><a href="#load-data-with-transform-specs" aria-hidden="true" class="hash-link"><svg class="hash-link-icon" aria-hidden="true" height="16" version="1.1" viewBox="0 0 16 16" width="16"><path fill-rule="evenodd" d="M4 9h1v1H4c-1.5 0-3-1.69-3-3.5S2.55 3 4 3h4c1.45 0 3 1.69 3 3.5 0 1.41-.91 2.72-2 3.25V8.59c.58-.45 1-1.27 1-2.09C10 5.22 8.98 4 8 4H4c-.98 0-2 1.22-2 2.5S3 9 4 9zm9-3h-1v1h1c1 0 2 1.22 2 2.5S13.98 1 [...]
 <p>We will ingest the sample data using the following spec, which demonstrates the use of transform specs:</p>
 <pre><code class="hljs css language-json">{
-  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+  <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
   <span class="hljs-attr">"spec"</span> : {
     <span class="hljs-attr">"dataSchema"</span> : {
       <span class="hljs-attr">"dataSource"</span> : <span class="hljs-string">"transform-tutorial"</span>,
-      <span class="hljs-attr">"parser"</span> : {
-        <span class="hljs-attr">"type"</span> : <span class="hljs-string">"string"</span>,
-        <span class="hljs-attr">"parseSpec"</span> : {
-          <span class="hljs-attr">"format"</span> : <span class="hljs-string">"json"</span>,
-          <span class="hljs-attr">"dimensionsSpec"</span> : {
-            <span class="hljs-attr">"dimensions"</span> : [
-              <span class="hljs-string">"animal"</span>,
-              { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"location"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
-            ]
-          },
-          <span class="hljs-attr">"timestampSpec"</span>: {
-            <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
-            <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
-          }
-        }
+      <span class="hljs-attr">"timestampSpec"</span>: {
+        <span class="hljs-attr">"column"</span>: <span class="hljs-string">"timestamp"</span>,
+        <span class="hljs-attr">"format"</span>: <span class="hljs-string">"iso"</span>
+      },
+      <span class="hljs-attr">"dimensionsSpec"</span> : {
+        <span class="hljs-attr">"dimensions"</span> : [
+          <span class="hljs-string">"animal"</span>,
+          { <span class="hljs-attr">"name"</span>: <span class="hljs-string">"location"</span>, <span class="hljs-attr">"type"</span>: <span class="hljs-string">"long"</span> }
+        ]
       },
       <span class="hljs-attr">"metricsSpec"</span> : [
         { <span class="hljs-attr">"type"</span> : <span class="hljs-string">"count"</span>, <span class="hljs-attr">"name"</span> : <span class="hljs-string">"count"</span> },
@@ -147,16 +141,19 @@ the <a href="index.html">single-machine quickstart</a> and have it running on yo
       }
     },
     <span class="hljs-attr">"ioConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
-      <span class="hljs-attr">"firehose"</span> : {
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
+      <span class="hljs-attr">"inputSource"</span> : {
         <span class="hljs-attr">"type"</span> : <span class="hljs-string">"local"</span>,
         <span class="hljs-attr">"baseDir"</span> : <span class="hljs-string">"quickstart/tutorial"</span>,
         <span class="hljs-attr">"filter"</span> : <span class="hljs-string">"transform-data.json"</span>
       },
+      <span class="hljs-attr">"inputFormat"</span> : {
+        <span class="hljs-attr">"type"</span> :<span class="hljs-string">"json"</span>
+      },
       <span class="hljs-attr">"appendToExisting"</span> : <span class="hljs-literal">false</span>
     },
     <span class="hljs-attr">"tuningConfig"</span> : {
-      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index"</span>,
+      <span class="hljs-attr">"type"</span> : <span class="hljs-string">"index_parallel"</span>,
       <span class="hljs-attr">"maxRowsPerSegment"</span> : <span class="hljs-number">5000000</span>,
       <span class="hljs-attr">"maxRowsInMemory"</span> : <span class="hljs-number">25000</span>
     }
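+<p>The <code>transformSpec</code> itself lives inside the <code>dataSchema</code>, alongside the <code>dimensionsSpec</code> and <code>metricsSpec</code>. As a minimal sketch (the expression and filter values here are illustrative), it takes this shape:</p>
+<pre><code class="hljs css language-json">"transformSpec": {
+  "transforms": [
+    { "type": "expression", "name": "animal", "expression": "concat('super-', animal)" }
+  ],
+  "filter": {
+    "type": "selector",
+    "dimension": "animal",
+    "value": "super-mongoose"
+  }
+}
+</code></pre>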

