Posted to commits@pekko.apache.org by jr...@apache.org on 2022/11/03 11:23:48 UTC

[incubator-pekko-http] 18/47: core: make a subset of parsers compile (Scala 3)

This is an automated email from the ASF dual-hosted git repository.

jrudolph pushed a commit to branch scala-3
in repository https://gitbox.apache.org/repos/asf/incubator-pekko-http.git

commit 56bb381e83481937d80766d7d66969b6d50dfe65
Author: Johannes Rudolph <jo...@gmail.com>
AuthorDate: Mon Mar 21 14:57:45 2022 +0100

    core: make a subset of parsers compile (Scala 3)
---
 .../http/impl/model/parser/Base64Parsing.scala     |  90 ++++
 .../akka/http/impl/model/parser/CommonRules.scala  | 475 +++++++++++++++++++++
 .../akka/http/impl/model/parser/HeaderParser.scala | 230 ++++++++++
 .../http/impl/model/parser/StringBuilding.scala    |  68 +++
 .../akka/http/impl/model/parser/UriParser.scala    | 254 +++++++++++
 5 files changed, 1117 insertions(+)

diff --git a/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/Base64Parsing.scala b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/Base64Parsing.scala
new file mode 100644
index 000000000..ae1f9ecaf
--- /dev/null
+++ b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/Base64Parsing.scala
@@ -0,0 +1,90 @@
+/*
+ * Copyright (C) 2009-2017 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.http.impl.model.parser
+
+import akka.annotation.InternalApi
+import akka.parboiled2.util.Base64
+import akka.parboiled2._
+
+/**
+ * INTERNAL API
+ *
+ * Rules for parsing Base-64 encoded strings.
+ */
+@InternalApi
+private[parser] trait Base64Parsing { this: Parser =>
+  import Base64Parsing._
+
+  /**
+   * Parses an RFC 2045-encoded string and decodes it onto the value stack.
+   */
+  def rfc2045String: Rule1[Array[Byte]] = base64StringOrBlock(rfc2045Alphabet, rfc2045StringDecoder)
+
+  /**
+   * Parses an RFC 2045-encoded string potentially containing newlines and decodes it onto the value stack.
+   */
+  def rfc2045Block: Rule1[Array[Byte]] = base64StringOrBlock(rfc2045Alphabet, rfc2045BlockDecoder)
+
+  /**
+   * Parses an akka.parboiled2.util.Base64.custom()-encoded string and decodes it onto the value stack.
+   */
+  def base64CustomString: Rule1[Array[Byte]] = base64StringOrBlock(customAlphabet, customStringDecoder)
+
+  /**
+   * Parses an akka.parboiled2.util.Base64.custom()-encoded string potentially containing newlines
+   * and decodes it onto the value stack.
+   */
+  def base64CustomBlock: Rule1[Array[Byte]] = base64StringOrBlock(customAlphabet, customBlockDecoder)
+
+  /**
+   * Parses a BASE64-encoded string with the given alphabet and decodes it onto the value
+   * stack using the given codec.
+   */
+  def base64StringOrBlock(alphabet: CharPredicate, decoder: Decoder): Rule1[Array[Byte]] = {
+    val start = cursor
+    rule {
+      oneOrMore(alphabet) ~ run {
+        decoder(input.sliceCharArray(start, cursor)) match {
+          case null  => MISMATCH
+          case bytes => push(bytes)
+        }
+      }
+    }
+  }
+}
+
+/** INTERNAL API */
+@InternalApi
+private[http] object Base64Parsing {
+  type Decoder = Array[Char] => Array[Byte]
+
+  val rfc2045Alphabet = CharPredicate(Base64.rfc2045().getAlphabet).asMaskBased
+  val customAlphabet = CharPredicate(Base64.custom().getAlphabet).asMaskBased
+
+  val rfc2045StringDecoder: Decoder = decodeString(Base64.rfc2045())
+  val customStringDecoder: Decoder = decodeString(Base64.custom())
+
+  val rfc2045BlockDecoder: Decoder = decodeBlock(Base64.rfc2045())
+  val customBlockDecoder: Decoder = decodeBlock(Base64.custom())
+
+  private val base64url = new Base64("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_=")
+  /** as described in RFC 4648, Section 5 - https://tools.ietf.org/html/rfc4648#section-5 */
+  val base64UrlStringDecoder: Decoder = decodeString(base64url)
+
+  def decodeString(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decodeFast(chars)
+  def decodeBlock(codec: Base64)(chars: Array[Char]): Array[Byte] = codec.decode(chars)
+}
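
A minimal usage sketch (not part of the commit) for the base64StringOrBlock/rfc2045String rules above. The parser class, rule name and sample input are hypothetical and assume access to the package-private trait, e.g. from a test in the same package:

    package akka.http.impl.model.parser

    import akka.parboiled2._

    // Hypothetical test parser wiring the rfc2045String rule into a complete rule.
    class Base64TestParser(val input: ParserInput) extends Parser with Base64Parsing {
      def base64Line: Rule1[Array[Byte]] = rule { rfc2045String ~ EOI }
    }

    object Base64TestParserDemo {
      def main(args: Array[String]): Unit = {
        // run() uses parboiled2's default Try delivery scheme
        val decoded = new Base64TestParser("SGVsbG8=").base64Line.run()
        decoded.foreach(bytes => println(new String(bytes, "UTF-8"))) // prints "Hello"
      }
    }
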
diff --git a/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/CommonRules.scala b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/CommonRules.scala
new file mode 100644
index 000000000..64fd5f728
--- /dev/null
+++ b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/CommonRules.scala
@@ -0,0 +1,475 @@
+/*
+ * Copyright (C) 2009-2021 Lightbend Inc. <https://www.lightbend.com>
+ */
+
+package akka.http.impl.model.parser
+
+import scala.collection.immutable
+import scala.collection.immutable.TreeMap
+
+import akka.http.scaladsl.model._
+import akka.http.scaladsl.model.headers._
+import akka.parboiled2._
+import akka.parboiled2.support.hlist._
+
+private[parser] trait CommonRules { this: Parser with StringBuilding =>
+  import CharacterClasses._
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7230#section-1.2 referencing
+  // http://tools.ietf.org/html/rfc5234#appendix-B.1
+  // ******************************************************************************************
+  def CRLF = rule { CR ~ LF }
+
+  def OCTET = rule { ANY }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7230#section-3.2.3
+  // ******************************************************************************************
+
+  def OWS = rule { zeroOrMore(optional(CRLF) ~ oneOrMore(WSP)) } // extended with `obs-fold`
+
+  def RWS = rule { oneOrMore(optional(CRLF) ~ oneOrMore(WSP)) } // extended with `obs-fold`
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7230#section-3.2.6
+  // ******************************************************************************************
+  def word = rule { token | `quoted-string` }
+
+  def token: Rule1[String] = rule { capture(token0) ~ OWS }
+
+  def `quoted-string`: Rule1[String] = rule {
+    DQUOTE ~ clearSB() ~ zeroOrMore(qdtext ~ appendSB() | `quoted-pair`) ~ push(sb.toString) ~ DQUOTE ~ OWS
+  }
+
+  def qdtext = rule { `qdtext-base` | `obs-text` }
+
+  def `obs-text` = rule { "\u0080" - "\uFFFE" }
+
+  def `quoted-pair` = rule { '\\' ~ (`quotable-base` | `obs-text`) ~ appendSB() }
+
+  // builds a string via the StringBuilding StringBuilder
+  def comment(maxNesting: Int = 10): Rule0 = rule {
+    ws('(') ~ clearSB() ~ zeroOrMore(ctext | `quoted-cpair` | `nested-comment`(maxNesting)) ~ ws(')')
+  }
+
+  def `nested-comment`(maxNesting: Int) =
+    if (maxNesting == 0) throw new ParsingException(ErrorInfo("Illegal header value", "Header comment nested too deeply"))
+    else {
+      var saved: String = null
+      rule { &('(') ~ run { saved = sb.toString } ~ (comment(maxNesting - 1) ~ prependSB(saved + " (") ~ appendSB(')') | setSB(saved) ~ test(false)) }
+    }
+
+  def ctext = rule { (`ctext-base` | `obs-text`) ~ appendSB() }
+
+  def `quoted-cpair` = `quoted-pair`
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7234#section-5.3
+  // ******************************************************************************************
+
+  def `expires-date`: Rule1[DateTime] = ??? /*rule {
+    (`HTTP-date` | zeroOrMore(ANY) ~ push(DateTime.MinValue)) ~ OWS
+  }*/
+  /*
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7231#section-7.1.1.1
+  // but more lenient where we have already seen differing implementations in the field
+  // ******************************************************************************************
+
+  def `HTTP-date`: Rule1[DateTime] = rule {
+    (`IMF-fixdate` | `asctime-date` | '0' ~ push(DateTime.MinValue)) ~ OWS
+  }
+
+  def `IMF-fixdate` = rule { // mixture of the spec-ed `IMF-fixdate` and `rfc850-date`
+    (`day-name-l` | `day-name`) ~ ", " ~ (date1 | date2) ~ ' ' ~ `time-of-day` ~ ' ' ~ ("GMT" | "UTC") ~> {
+      (wkday, day, month, year, hour, min, sec) => createDateTime(year, month, day, hour, min, sec, wkday)
+    }
+  }
+
+  def `day-name` = rule(
+    "Sun" ~ push(0) | "Mon" ~ push(1) | "Tue" ~ push(2) | "Wed" ~ push(3) | "Thu" ~ push(4) | "Fri" ~ push(5) | "Sat" ~ push(6))
+
+  def date1 = rule { day ~ `date-sep` ~ month ~ `date-sep` ~ year }
+
+  def day = rule { digit2 | digit }
+
+  def month = rule(
+    "Jan" ~ push(1) | "Feb" ~ push(2) | "Mar" ~ push(3) | "Apr" ~ push(4) | "May" ~ push(5) | "Jun" ~ push(6) | "Jul" ~ push(7) |
+      "Aug" ~ push(8) | "Sep" ~ push(9) | "Oct" ~ push(10) | "Nov" ~ push(11) | "Dec" ~ push(12))
+
+  def year = rule { digit4 | digit2 ~> (y => if (y <= 69) y + 2000 else y + 1900) }
+
+  def `time-of-day` = rule { hour ~ ':' ~ minute ~ ':' ~ second }
+  def hour = rule { digit2 }
+  def minute = rule { digit2 }
+  def second = rule { digit2 }
+
+  // def `obs-date` = rule { `rfc850-date` | `asctime-date` }
+
+  // def `rfc850-date` = rule { `day-name-l` ~ ", " ~ date2 ~ ' ' ~ `time-of-day` ~ " GMT" }
+
+  // per #17714, parse two digit year to https://tools.ietf.org/html/rfc6265#section-5.1.1
+  def date2 = rule { day ~ '-' ~ month ~ '-' ~ (digit2 ~> (y => if (y <= 69) y + 2000 else y + 1900)) }
+
+  def `day-name-l` = rule(
+    "Sunday" ~ push(0) | "Monday" ~ push(1) | "Tuesday" ~ push(2) | "Wednesday" ~ push(3) | "Thursday" ~ push(4) |
+      "Friday" ~ push(5) | "Saturday" ~ push(6))
+
+  def `asctime-date` = rule {
+    `day-name` ~ ' ' ~ date3 ~ ' ' ~ `time-of-day` ~ ' ' ~ year ~> {
+      (wkday, month, day, hour, min, sec, year) => createDateTime(year, month, day, hour, min, sec, wkday)
+    }
+  }
+
+  def date3 = rule { month ~ ' ' ~ (digit2 | ' ' ~ digit) }
+*/
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7231#section-5.3.1
+  // ******************************************************************************************
+
+  def weight = rule { ws(';') ~ ws('q') ~ ws('=') ~ qvalue } // a bit more lenient than the spec
+
+  def qvalue = rule { // a bit more lenient than the spec
+    capture('0' ~ optional('.' ~ zeroOrMore(DIGIT))
+      | '.' ~ oneOrMore(DIGIT)
+      | '1' ~ optional('.' ~ zeroOrMore('0'))) ~> (_.toFloat) ~ OWS
+  }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7231#section-3.1.1.1
+  // ******************************************************************************************
+
+  def `media-type`: RuleN[String :: String :: Seq[(String, String)] :: HNil] = rule {
+    `type` ~ '/' ~ subtype ~ zeroOrMore(ws(';') ~ parameter)
+  }
+
+  def `type` = rule { token }
+
+  def subtype = rule { token }
+
+  def parameter = rule { attribute ~ ws('=') ~ value ~> ((_, _)) }
+
+  def attribute = rule { token }
+
+  def value = rule { word }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc4647#section-2.1
+  // ******************************************************************************************
+  def language = rule {
+    `primary-tag` ~ zeroOrMore('-' ~ `sub-tag`) ~> (Language(_, _))
+  }
+
+  def `primary-tag` = rule { capture(oneOrMore(ALPHA)) ~ OWS }
+
+  def `sub-tag` = rule { capture(oneOrMore(ALPHANUM)) ~ OWS }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc4647#section-2.1
+  // ******************************************************************************************
+
+  def `auth-scheme` = rule { token }
+
+  def `auth-param` = rule { token ~ ws('=') ~ word }
+
+  def `token68` = rule { capture(oneOrMore(`token68-start`) ~ zeroOrMore('=')) ~ OWS }
+
+  def challenge = rule {
+    `challenge-or-credentials` ~> { (scheme, tokenAndParams) =>
+      tokenAndParams match {
+        case ("", Nil)    => HttpChallenge(scheme, None)
+        case (token, Nil) => HttpChallenge(scheme, None, Map("" -> token))
+        case (_, params) => {
+          val (realms, otherParams) = params.partition(_._1 equalsIgnoreCase "realm")
+          HttpChallenge(scheme, realms.headOption.map(_._2), TreeMap(otherParams: _*))
+        }
+      }
+    }
+  }
+
+  def `challenge-or-credentials`: Rule2[String, (String, Seq[(String, String)])] = rule {
+    `auth-scheme` ~ (
+      oneOrMore(`auth-param` ~> (_ -> _)).separatedBy(listSep) ~> (x => ("", x))
+      | `token68` ~> (x => (x, Nil))
+      | push(("", Nil)))
+  }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7234#section-1.2.1
+  // ******************************************************************************************
+
+  def `delta-seconds` = rule { longNumberCappedAtIntMaxValue }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7232#section-2.3
+  // ******************************************************************************************
+
+  def `entity-tag` = rule {
+    ("W/" ~ push(true) | push(false)) ~ `opaque-tag` ~> ((weak, tag) => EntityTag(tag, weak))
+  }
+
+  def `opaque-tag` = rule { '"' ~ capture(zeroOrMore(`etagc-base` | `obs-text`)) ~ '"' }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7235#section-2.1
+  // ******************************************************************************************
+  def credentials = rule {
+    `basic-credential-def` | `oauth2-bearer-token` | `generic-credentials`
+  }
+
+  def `basic-credential-def` = rule {
+    ignoreCase("basic") ~ OWS ~ `basic-cookie` ~> (BasicHttpCredentials(_))
+  }
+
+  def `basic-cookie` = rule { `token68` }
+
+  // http://tools.ietf.org/html/rfc6750#section-2.1
+  def `oauth2-bearer-token` = rule {
+    ignoreCase("bearer") ~ OWS ~ `token68` ~> (OAuth2BearerToken(_))
+  }
+
+  def `generic-credentials` = rule {
+    `challenge-or-credentials` ~> ((scheme, tokenAndParams) => {
+      val (token, params) = tokenAndParams
+      GenericHttpCredentials(scheme, token, TreeMap(params: _*))
+    })
+  }
+
+  /**
+   * Either `Some(cookiePair)` if the cookie pair is parsable using the given cookie parsing mode,
+   * or `None` otherwise.
+   */
+  def `optional-cookie-pair`: Rule1[Option[HttpCookiePair]] = rule {
+    (`cookie-pair` ~ &(`cookie-separator`) ~> (Some(_: HttpCookiePair))) |
+      // fallback that parses and discards everything until the next semicolon
+      (zeroOrMore(!`cookie-separator` ~ ANY) ~ &(`cookie-separator`) ~ push(None))
+  }
+
+  def `cookie-pair`: Rule1[HttpCookiePair] = rule {
+    `cookie-name` ~ ws('=') ~ `cookie-value` ~> (createCookiePair _)
+  }
+
+  def `cookie-name` = rule { token }
+
+  // abstract methods need to be implemented depending on actual cookie parsing mode
+  def `cookie-value`: Rule1[String]
+  def createCookiePair(name: String, value: String): HttpCookiePair
+
+  // ******************************************************************************************
+  // https://tools.ietf.org/html/rfc6265#section-4.1.1
+  // ******************************************************************************************
+  def `cookie-value-rfc-6265` = rule {
+    ('"' ~ capture(zeroOrMore(`cookie-octet-rfc-6265`)) ~ '"' | capture(zeroOrMore(`cookie-octet-rfc-6265`))) ~ OWS
+  }
+
+  def `cookie-value-raw` = rule {
+    capture(zeroOrMore(`cookie-octet-raw`)) ~ OWS
+  }
+
+  def `cookie-av` = rule {
+    `expires-av` | `max-age-av` | `domain-av` | `path-av` | `same-site-av` | `secure-av` | `httponly-av` | `extension-av`
+  }
+
+  def `expires-av` = rule {
+    ignoreCase("expires=") ~ OWS ~ `expires-date` ~> { (c: HttpCookie, dt: DateTime) => c.withExpires(dt) }
+  }
+
+  def `max-age-av` = rule {
+    ignoreCase("max-age=") ~ OWS ~ longNumberCappedAtIntMaxValue ~> { (c: HttpCookie, seconds: Long) => c.withMaxAge(seconds) }
+  }
+
+  def `domain-av` = rule {
+    ignoreCase("domain=") ~ OWS ~ `domain-value` ~> { (c: HttpCookie, domainName: String) => c.withDomain(domainName) }
+  }
+
+  // https://tools.ietf.org/html/rfc1034#section-3.5 relaxed by https://tools.ietf.org/html/rfc1123#section-2
+  // to also allow digits at the start of a label
+  def `domain-value` = rule {
+    optional('.') ~ capture(oneOrMore(oneOrMore(oneOrMore(ALPHANUM)).separatedBy('-')).separatedBy('.')) ~ OWS
+  }
+
+  def `path-av` = rule {
+    ignoreCase("path=") ~ OWS ~ `path-value` ~> { (c: HttpCookie, pathValue: String) => c.withPath(pathValue) }
+  }
+
+  // http://www.rfc-editor.org/errata_search.php?rfc=6265
+  def `path-value` = rule {
+    capture(zeroOrMore(`av-octet`)) ~ OWS
+  }
+
+  def `same-site-av` = rule {
+    ignoreCase("samesite=") ~ OWS ~ `same-site-value` ~> { (c: HttpCookie, sameSiteValue: String) => c.withSameSite(sameSite = SameSite(sameSiteValue)) }
+  }
+
+  def `same-site-value` = rule {
+    capture(ignoreCase("lax") | ignoreCase("strict") | ignoreCase("none")) ~ OWS
+  }
+
+  def `secure-av` = rule {
+    ignoreCase("secure") ~ OWS ~> { (cookie: HttpCookie) => cookie.withSecure(true) }
+  }
+
+  def `httponly-av` = rule {
+    ignoreCase("httponly") ~ OWS ~> { (cookie: HttpCookie) => cookie.withHttpOnly(true) }
+  }
+
+  // http://www.rfc-editor.org/errata_search.php?rfc=6265
+  def `extension-av` = rule {
+    !(ignoreCase("expires=")
+      | ignoreCase("max-age=")
+      | ignoreCase("domain=")
+      | ignoreCase("path=")
+      | ignoreCase("samesite=")
+      | ignoreCase("secure")
+      | ignoreCase("httponly")) ~
+      capture(zeroOrMore(`av-octet`)) ~ OWS ~> { (c: HttpCookie, s: String) => c.withExtension(s) }
+  }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc6454#section-7.1
+  // ******************************************************************************************
+  def `origin-list-or-null` = rule {
+    "null" ~ OWS ~ push(immutable.Seq.empty[HttpOrigin]) | `origin-list`
+  }
+
+  def `origin-list` = rule {
+    oneOrMore(capture(oneOrMore(VCHAR)) ~> (HttpOrigin(_))).separatedBy(SP) ~ OWS // offload to URL parser
+  }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7233#appendix-D
+  // ******************************************************************************************
+
+  def `byte-content-range` = rule { `bytes-unit` ~ (`byte-range-resp` | `unsatisfied-range`) }
+
+  def `byte-range` = rule {
+    `first-byte-pos` ~ ws('-') ~ `last-byte-pos`
+  }
+
+  def `byte-range-resp` = rule {
+    `byte-range` ~ ws('/') ~ (`complete-length` ~> (Some(_)) | ws('*') ~ push(None)) ~> (ContentRange(_, _, _))
+  }
+
+  def `byte-range-set` = rule {
+    zeroOrMore(ws(',')) ~ oneOrMore(`byte-range-spec` | `suffix-byte-range-spec`).separatedBy(listSep)
+  }
+
+  def `byte-range-spec` = rule {
+    `first-byte-pos` ~ ws('-') ~ (`last-byte-pos` ~> (ByteRange(_: Long, _)) | run(ByteRange.fromOffset(_)))
+  }
+
+  def `byte-ranges-specifier` = rule { `bytes-unit` ~ ws('=') ~ `byte-range-set` }
+
+  def `bytes-unit` = rule { "bytes" ~ OWS ~ push(RangeUnits.Bytes) }
+
+  def `complete-length` = rule { longNumberCapped }
+
+  def `first-byte-pos` = rule { longNumberCapped }
+
+  def `last-byte-pos` = rule { longNumberCapped }
+
+  def `other-content-range` = rule { `other-range-unit` ~ `other-range-resp` }
+
+  def `other-range-resp` = rule { capture(zeroOrMore(ANY)) ~> (ContentRange.Other(_)) }
+
+  def `other-range-set` = rule { oneOrMore(VCHAR) ~ OWS }
+
+  def `other-range-unit` = rule { token ~> (RangeUnits.Other(_)) }
+
+  def `other-ranges-specifier` = rule { `other-range-unit` ~ ws('=') ~ `other-range-set` }
+
+  def `range-unit` = rule { `bytes-unit` | `other-range-unit` }
+
+  def `suffix-byte-range-spec` = rule { '-' ~ `suffix-length` ~> (ByteRange.suffix(_)) }
+
+  def `suffix-length` = rule { longNumberCapped }
+
+  def `unsatisfied-range` = rule { '*' ~ '/' ~ `complete-length` ~> (ContentRange.Unsatisfiable(_)) }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7231#section-5.5.3
+  // ******************************************************************************************
+
+  def product = rule { token ~ (ws('/') ~ `product-version` | push("")) }
+
+  def `product-version` = rule { token }
+
+  def `product-or-comment`: Rule1[ProductVersion] = rule(
+    product ~ comment() ~> (ProductVersion(_, _, sb.toString))
+      | product ~> (ProductVersion(_, _))
+      | comment() ~ push(ProductVersion("", "", sb.toString)))
+
+  def products: Rule1[Seq[ProductVersion]] = rule {
+    `product-or-comment` ~ zeroOrMore(`product-or-comment`) ~> (_ +: _)
+  }
+
+  // ******************************************************************************************
+  // http://tools.ietf.org/html/rfc7230#section-4
+  // ******************************************************************************************
+
+  def `transfer-coding`: Rule1[TransferEncoding] = rule(
+    ignoreCase("chunked") ~ OWS ~ push(TransferEncodings.chunked)
+      | ignoreCase("gzip") ~ OWS ~ push(TransferEncodings.gzip)
+      | ignoreCase("deflate") ~ OWS ~ push(TransferEncodings.deflate)
+      | ignoreCase("compress") ~ OWS ~ push(TransferEncodings.compress)
+      | ignoreCase("trailers") ~ OWS ~ push(TransferEncodings.trailers)
+      | `transfer-extension`)
+
+  def `transfer-extension`: Rule1[TransferEncodings.Extension] = rule {
+    token ~ zeroOrMore(ws(';') ~ `transfer-parameter`) ~> (p => TreeMap(p: _*)) ~> (TransferEncodings.Extension(_, _))
+  }
+
+  def `transfer-parameter` = rule { token ~ ws('=') ~ word ~> (_ -> _) }
+
+  // ******************************************************************************************
+  //                                    helpers
+  // ******************************************************************************************
+  def token0: Rule0 = rule { oneOrMore(tchar) }
+
+  def listSep: Rule0 = rule { ',' ~ OWS }
+
+  def digit: Rule1[Int] = rule { DIGIT ~ push(digitInt(lastChar)) }
+
+  def digit2: Rule1[Int] = rule { DIGIT ~ DIGIT ~ push(digitInt(charAt(-2)) * 10 + digitInt(lastChar)) }
+
+  def digit4: Rule1[Int] = rule {
+    DIGIT ~ DIGIT ~ DIGIT ~ DIGIT ~ push(digitInt(charAt(-4)) * 1000 + digitInt(charAt(-3)) * 100 + digitInt(charAt(-2)) * 10 + digitInt(lastChar))
+  }
+
+  def ws(c: Char): Rule0 = rule { c ~ OWS }
+  def ws(s: String): Rule0 = rule { s ~ OWS }
+
+  // parses a potentially long series of digits and extracts its Long value capping at Int.MaxValue in case of overflows
+  def longNumberCappedAtIntMaxValue: Rule1[Long] = rule {
+    capture((1 to 11).times(DIGIT)) ~> (s => math.min(s.toLong, Int.MaxValue)) ~ zeroOrMore(DIGIT) ~ OWS
+  }
+
+  // parses a potentially long series of digits and extracts its Long value capping at 999,999,999,999,999,999 in case of overflows
+  def longNumberCapped: Rule1[Long] = rule(
+    (capture((1 to 18).times(DIGIT)) ~ !DIGIT ~> (_.toLong)
+      | oneOrMore(DIGIT) ~ push(999999999999999999L)) ~ OWS)
+
+  private def digitInt(c: Char): Int = c - '0'
+
+  private def createDateTime(year: Int, month: Int, day: Int, hour: Int, min: Int, sec: Int, wkday: Int): DateTime = {
+    val dt = DateTime(year, month, day, hour, min, sec)
+    if (dt.weekday != wkday)
+      throw ParsingException(s"Illegal weekday in date $dt: is '${DateTime.weekday(wkday)}' but " +
+        s"should be '${DateTime.weekday(dt.weekday)}'")
+    dt
+  }
+
+  def httpMethodDef: Rule1[HttpMethod] = rule {
+    token ~> { s =>
+      HttpMethods.getForKey(s) match {
+        case Some(m) => m
+        case None    => HttpMethod.custom(s)
+      }
+    }
+  }
+
+  def newUriParser(input: ParserInput): UriParser
+  def uriReference: Rule1[Uri] = rule { runSubParser(newUriParser(_).`URI-reference-pushed`) }
+}
+
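
A small sketch (not part of the commit) of the realm handling inside the challenge rule above, reduced to plain collections; the helper name and sample parameters are made up for illustration:

    import scala.collection.immutable.TreeMap

    object ChallengeParamsDemo {
      // Mirrors the partition in `challenge`: the first "realm" param becomes the
      // challenge realm, all remaining params go into a sorted map.
      def splitRealm(params: Seq[(String, String)]): (Option[String], TreeMap[String, String]) = {
        val (realms, otherParams) = params.partition(_._1.equalsIgnoreCase("realm"))
        (realms.headOption.map(_._2), TreeMap(otherParams: _*))
      }

      def main(args: Array[String]): Unit =
        println(splitRealm(Seq("realm" -> "example", "charset" -> "UTF-8")))
        // prints (Some(example), TreeMap(charset -> UTF-8))
    }
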
diff --git a/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/HeaderParser.scala b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/HeaderParser.scala
new file mode 100644
index 000000000..af38548c4
--- /dev/null
+++ b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/HeaderParser.scala
@@ -0,0 +1,230 @@
+/*
+ * Copyright (C) 2009-2021 Lightbend Inc. <https://www.lightbend.com>
+ */
+
+package akka.http.impl.model.parser
+
+import akka.annotation.InternalApi
+import akka.http.scaladsl.settings.ParserSettings
+import akka.http.scaladsl.settings.ParserSettings.CookieParsingMode
+import akka.http.scaladsl.settings.ParserSettings.{ IllegalResponseHeaderValueProcessingMode, IllegalResponseHeaderNameProcessingMode }
+import akka.http.scaladsl.model.headers.HttpCookiePair
+import akka.util.ConstantFun
+
+import scala.util.control.NonFatal
+import akka.http.impl.util.SingletonException
+import akka.parboiled2._
+import akka.parboiled2.support.hlist._
+import akka.http.scaladsl.model._
+
+/**
+ * INTERNAL API.
+ */
+//@InternalApi
+//private[http] class HeaderParser(
+//  val input: ParserInput,
+//  settings:  HeaderParser.Settings = HeaderParser.DefaultSettings)
+//  extends Parser with DynamicRuleHandler[HeaderParser, HttpHeader :: HNil]
+//  with CommonRules
+//  with AcceptCharsetHeader
+//  with AcceptEncodingHeader
+//  with AcceptHeader
+//  with AcceptLanguageHeader
+//  with CacheControlHeader
+//  with ContentDispositionHeader
+//  with ContentTypeHeader
+//  with CommonActions
+//  with IpAddressParsing
+//  with LinkHeader
+//  with SimpleHeaders
+//  with StringBuilding
+//  with WebSocketHeaders {
+//  import CharacterClasses._
+//
+//  override def customMediaTypes = settings.customMediaTypes
+//  protected def maxCommentParsingDepth: Int = settings.maxCommentParsingDepth
+//
+//  // http://www.rfc-editor.org/errata_search.php?rfc=7230 errata id 4189
+//  def `header-field-value`: Rule1[String] = rule {
+//    FWS ~ clearSB() ~ `field-value` ~ FWS ~ EOI ~ push(sb.toString)
+//  }
+//  def `field-value` = {
+//    var fwsStart = cursor
+//    rule {
+//      zeroOrMore(`field-value-chunk`).separatedBy { // zeroOrMore because we need to also accept empty values
+//        run { fwsStart = cursor } ~ FWS ~ &(`field-value-char`) ~ run { if (cursor > fwsStart) sb.append(' ') }
+//      }
+//    }
+//  }
+//  def `field-value-chunk` = rule { oneOrMore(`field-value-char` ~ appendSB()) }
+//  def `field-value-char` = rule { VCHAR | `obs-text` }
+//  def FWS = rule { zeroOrMore(WSP) ~ zeroOrMore(`obs-fold`) }
+//  def `obs-fold` = rule { CRLF ~ oneOrMore(WSP) }
+//
+//  ///////////////// DynamicRuleHandler //////////////
+//
+//  override type Result = HeaderParser.Result
+//  def parser: HeaderParser = this
+//  def success(result: HttpHeader :: HNil): Result = HeaderParser.Success(result.head)
+//  def parseError(error: ParseError): HeaderParser.Failure = {
+//    val formatter = new ErrorFormatter(showLine = false)
+//    HeaderParser.Failure(ErrorInfo(formatter.format(error, input), formatter.formatErrorLine(error, input)))
+//  }
+//  def failure(error: Throwable): HeaderParser.Failure =
+//    HeaderParser.Failure {
+//      error match {
+//        case IllegalUriException(info) => info
+//        case NonFatal(e)               => ErrorInfo.fromCompoundString(e.getMessage)
+//      }
+//    }
+//  def ruleNotFound(ruleName: String): Result = HeaderParser.RuleNotFound
+//
+//  def newUriParser(input: ParserInput): UriParser = new UriParser(input, uriParsingMode = settings.uriParsingMode)
+//
+//  def `cookie-value`: Rule1[String] =
+//    settings.cookieParsingMode match {
+//      case CookieParsingMode.RFC6265 => rule { `cookie-value-rfc-6265` }
+//      case CookieParsingMode.Raw     => rule { `cookie-value-raw` }
+//    }
+//
+//  def createCookiePair(name: String, value: String): HttpCookiePair = settings.cookieParsingMode match {
+//    case CookieParsingMode.RFC6265 => HttpCookiePair(name, value)
+//    case CookieParsingMode.Raw     => HttpCookiePair.raw(name, value)
+//  }
+//}
+
+/**
+ * INTERNAL API.
+ */
+@InternalApi
+private[http] object HeaderParser {
+  sealed trait Result
+  case class Success(header: HttpHeader) extends Result
+  case class Failure(info: ErrorInfo) extends Result
+  case object RuleNotFound extends Result
+
+  object EmptyCookieException extends SingletonException("Cookie header contained no parsable cookie values.")
+
+  def lookupParser(headerName: String, settings: Settings = DefaultSettings): Option[String => HeaderParser.Result] =
+    ???
+  //    dispatch.lookup(headerName).map { runner => (value: String) =>
+  //      import akka.parboiled2.EOI
+  //      val v = value + EOI // this makes sure the parser isn't broken even if there's no trailing garbage in this value
+  //      val parser = new HeaderParser(v, settings)
+  //      runner(parser) match {
+  //        case r @ Success(_) if parser.cursor == v.length => r
+  //        case r @ Success(_) =>
+  //          Failure(ErrorInfo(
+  //            "Header parsing error",
+  //            s"Rule for $headerName accepted trailing garbage. Is the parser missing a trailing EOI?"))
+  //        case Failure(e)   => Failure(e.copy(summary = e.summary.filterNot(_ == EOI), detail = e.detail.filterNot(_ == EOI)))
+  //        case RuleNotFound => RuleNotFound
+  //      }
+  //    }
+
+  def parseFull(headerName: String, value: String, settings: Settings = DefaultSettings): HeaderParser.Result =
+    lookupParser(headerName, settings).map(_(value)).getOrElse(HeaderParser.RuleNotFound)
+
+  def ruleNames: Seq[String] = ???
+  //  val (dispatch, ruleNames) = DynamicRuleDispatch[HeaderParser, HttpHeader :: HNil](
+  //    "accept",
+  //    "accept-charset",
+  //    "accept-encoding",
+  //    "accept-language",
+  //    "accept-ranges",
+  //    "access-control-allow-credentials",
+  //    "access-control-allow-headers",
+  //    "access-control-allow-methods",
+  //    "access-control-allow-origin",
+  //    "access-control-expose-headers",
+  //    "access-control-max-age",
+  //    "access-control-request-headers",
+  //    "access-control-request-method",
+  //    "accept",
+  //    "age",
+  //    "allow",
+  //    "authorization",
+  //    "cache-control",
+  //    "connection",
+  //    "content-disposition",
+  //    "content-encoding",
+  //    "content-length",
+  //    "content-location",
+  //    "content-range",
+  //    "content-type",
+  //    "cookie",
+  //    "date",
+  //    "etag",
+  //    "expect",
+  //    "expires",
+  //    "host",
+  //    "if-match",
+  //    "if-modified-since",
+  //    "if-none-match",
+  //    "if-range",
+  //    "if-unmodified-since",
+  //    "last-modified",
+  //    "link",
+  //    "location",
+  //    "origin",
+  //    "proxy-authenticate",
+  //    "proxy-authorization",
+  //    "range",
+  //    "referer",
+  //    "retry-after",
+  //    "server",
+  //    "sec-websocket-accept",
+  //    "sec-websocket-extensions",
+  //    "sec-websocket-key",
+  //    "sec-websocket-protocol",
+  //    "sec-websocket-version",
+  //    "set-cookie",
+  //    "strict-transport-security",
+  //    "te",
+  //    "transfer-encoding",
+  //    "upgrade",
+  //    "user-agent",
+  //    "www-authenticate",
+  //    "x-forwarded-for",
+  //    "x-forwarded-host",
+  //    "x-forwarded-proto",
+  //    "x-real-ip")
+
+  abstract class Settings {
+    def uriParsingMode: Uri.ParsingMode
+    def cookieParsingMode: ParserSettings.CookieParsingMode
+    def customMediaTypes: MediaTypes.FindCustom
+    def maxCommentParsingDepth: Int
+    def illegalResponseHeaderNameProcessingMode: IllegalResponseHeaderNameProcessingMode
+    def illegalResponseHeaderValueProcessingMode: IllegalResponseHeaderValueProcessingMode
+  }
+  def Settings(
+    uriParsingMode:         Uri.ParsingMode                          = Uri.ParsingMode.Relaxed,
+    cookieParsingMode:      ParserSettings.CookieParsingMode         = ParserSettings.CookieParsingMode.RFC6265,
+    customMediaTypes:       MediaTypes.FindCustom                    = ConstantFun.scalaAnyTwoToNone,
+    maxCommentParsingDepth: Int                                      = 5,
+    modeValue:              IllegalResponseHeaderValueProcessingMode = ParserSettings.IllegalResponseHeaderValueProcessingMode.Error,
+    modeName:               IllegalResponseHeaderNameProcessingMode  = ParserSettings.IllegalResponseHeaderNameProcessingMode.Error): Settings = {
+
+    val _uriParsingMode = uriParsingMode
+    val _cookieParsingMode = cookieParsingMode
+    val _customMediaTypes = customMediaTypes
+    val _maxCommentParsingDepth = maxCommentParsingDepth
+    val _illegalResponseHeaderValueProcessingMode = modeValue
+    val _illegalResponseHeaderNameProcessingMode = modeName
+
+    new Settings {
+      def uriParsingMode: Uri.ParsingMode = _uriParsingMode
+      def cookieParsingMode: CookieParsingMode = _cookieParsingMode
+      def customMediaTypes: MediaTypes.FindCustom = _customMediaTypes
+      def maxCommentParsingDepth: Int = _maxCommentParsingDepth
+
+      def illegalResponseHeaderValueProcessingMode: IllegalResponseHeaderValueProcessingMode =
+        _illegalResponseHeaderValueProcessingMode
+
+      def illegalResponseHeaderNameProcessingMode: IllegalResponseHeaderNameProcessingMode =
+        _illegalResponseHeaderNameProcessingMode
+    }
+  }
+  val DefaultSettings: Settings = Settings()
+}
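
A sketch (not part of the commit) of how the Result ADT above is meant to be consumed through parseFull once lookupParser is ported; with the current stub it would still throw NotImplementedError, and the describe helper is hypothetical:

    package akka.http.impl

    import akka.http.impl.model.parser.HeaderParser

    object HeaderParseDemo {
      // Hypothetical helper: dispatch a raw header value to the dedicated rule, if any.
      def describe(headerName: String, value: String): String =
        HeaderParser.parseFull(headerName.toLowerCase, value) match {
          case HeaderParser.Success(header) => s"parsed: $header"
          case HeaderParser.Failure(info)   => s"invalid $headerName: ${info.summary}"
          case HeaderParser.RuleNotFound    => s"no dedicated rule for $headerName"
        }
    }
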
diff --git a/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/StringBuilding.scala b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/StringBuilding.scala
new file mode 100644
index 000000000..3e95c2a99
--- /dev/null
+++ b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/StringBuilding.scala
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2009-2017 Mathias Doenitz, Alexander Myltsev
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package akka.http.impl.model.parser
+
+import akka.parboiled2._
+
+/**
+ * For certain high-performance use cases it is better to construct the Strings
+ * that the parser produces/extracts from the input in a char-by-char fashion.
+ *
+ * Mixing this trait into your parser gives you a simple facility to support this.
+ */
+private[parser] trait StringBuilding { this: Parser =>
+  protected val sb = new java.lang.StringBuilder
+
+  def clearSB(): Rule0 = rule { run(sb.setLength(0)) }
+
+  def appendSB(): Rule0 = rule { run(sb.append(lastChar)) }
+
+  def appendSB(offset: Int): Rule0 = rule { run(sb.append(charAt(offset))) }
+
+  def appendSB(c: Char): Rule0 = rule { run(sb.append(c)) }
+
+  def appendSB(s: String): Rule0 = rule { run(sb.append(s)) }
+
+  def prependSB(): Rule0 = rule { run(doPrepend(lastChar)) }
+
+  def prependSB(offset: Int): Rule0 = rule { run(doPrepend(charAt(offset))) }
+
+  def prependSB(c: Char): Rule0 = rule { run(doPrepend(c)) }
+
+  def prependSB(s: String): Rule0 = rule { run(doPrepend(s)) }
+
+  def setSB(s: String): Rule0 = rule { run(doSet(s)) }
+
+  private def doPrepend(c: Char): Unit = {
+    val saved = sb.toString
+    sb.setLength(0)
+    sb.append(c)
+    sb.append(saved)
+  }
+
+  private def doPrepend(s: String): Unit = {
+    val saved = sb.toString
+    sb.setLength(0)
+    sb.append(s)
+    sb.append(saved)
+  }
+
+  private def doSet(s: String): Unit = {
+    sb.setLength(0)
+    sb.append(s)
+  }
+}
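
For illustration (not part of the commit), the save/clear/append sequence that doPrepend performs above, shown on a plain java.lang.StringBuilder:

    object PrependDemo {
      def main(args: Array[String]): Unit = {
        val sb = new java.lang.StringBuilder("world")
        // what doPrepend("hello ") does: remember the current contents,
        // reset the builder, then append the prefix and the saved contents
        val saved = sb.toString
        sb.setLength(0)
        sb.append("hello ")
        sb.append(saved)
        println(sb) // prints "hello world"
      }
    }
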
diff --git a/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/UriParser.scala b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/UriParser.scala
new file mode 100644
index 000000000..5d237afc9
--- /dev/null
+++ b/akka-http-core/src/main/scala-3/akka/http/impl/model/parser/UriParser.scala
@@ -0,0 +1,254 @@
+/*
+ * Copyright (C) 2009-2021 Lightbend Inc. <https://www.lightbend.com>
+ */
+
+package akka.http.impl.model.parser
+
+import java.nio.charset.Charset
+
+import akka.parboiled2._
+import akka.http.impl.util.{ StringRendering, enhanceString_ }
+import akka.http.scaladsl.model.{ Uri, UriRendering }
+import akka.http.scaladsl.model.headers.HttpOrigin
+import Parser.DeliveryScheme.Either
+import Uri._
+import akka.annotation.InternalApi
+
+/**
+ * INTERNAL API
+ *
+ * http://tools.ietf.org/html/rfc3986
+ */
+@InternalApi
+private[http] final class UriParser(
+  private[this] var _input: ParserInput,
+  val uriParsingCharset:    Charset,
+  val uriParsingMode:       Uri.ParsingMode,
+  val maxValueStackSize:    Int) extends Parser(maxValueStackSize = maxValueStackSize) /*with IpAddressParsing with StringBuilding*/ {
+  import CharacterClasses._
+
+  override def input: ParserInput = _input
+
+  def this(
+    input:             ParserInput,
+    uriParsingCharset: Charset         = UTF8,
+    uriParsingMode:    Uri.ParsingMode = Uri.ParsingMode.Relaxed) =
+    this(input, uriParsingCharset, uriParsingMode, 1024)
+
+  def parseAbsoluteUri(): Uri = ???
+
+  def parseUriReference(): Uri = ???
+
+  def parseAndResolveUriReference(base: Uri): Uri = ???
+
+  def parseOrigin(): HttpOrigin = ???
+
+  def parseHost(): Host = ???
+
+  /**
+   * @return a 'raw' (percent-encoded) query string that does not contain invalid characters.
+   */
+  def parseRawQueryString(): String = ???
+
+  /**
+   * @param rawQueryString 'raw' (percent-encoded) query string that in Relaxed mode may contain characters not allowed
+   * by https://tools.ietf.org/html/rfc3986#section-3.4 but is guaranteed not to have invalid percent-encoded characters
+   * @return a 'raw' (percent-encoded) query string that does not contain invalid characters.
+   */
+  def parseSafeRawQueryString(rawQueryString: String): String = uriParsingMode match {
+    case Uri.ParsingMode.Strict =>
+      // Cannot contain invalid characters in strict mode
+      rawQueryString
+    case Uri.ParsingMode.Relaxed =>
+      // Percent-encode invalid characters
+      UriRendering.encode(new StringRendering, rawQueryString, uriParsingCharset, `query-fragment-char` ++ '%', false).get
+  }
+
+  def parseQuery(): Query = ???
+
+  def parseAuthority(): Authority = ???
+  /*rule(authority ~ EOI).run() match {
+      case Right(_)    => Authority(_host, _port, _userinfo)
+      case Left(error) => fail(error, "authority")
+    }*/
+
+  //  def fail(error: ParseError, target: String): Nothing = {
+  //    val formatter = new ErrorFormatter(showLine = false)
+  //    Uri.fail(s"Illegal $target: " + formatter.format(error, input), formatter.formatErrorLine(error, input))
+  //  }
+  //
+  //  private[this] val `path-segment-char` = uriParsingMode match {
+  //    case Uri.ParsingMode.Strict => `pchar-base`
+  //    case _                      => `relaxed-path-segment-char`
+  //  }
+  //  private[this] val `query-char` = uriParsingMode match {
+  //    case Uri.ParsingMode.Strict => `query-fragment-char`
+  //    case _                      => `relaxed-query-char`
+  //  }
+  //  private[this] val `query-key-char` = uriParsingMode match {
+  //    case Uri.ParsingMode.Strict  => `strict-query-key-char`
+  //    case Uri.ParsingMode.Relaxed => `relaxed-query-key-char`
+  //  }
+  //  private[this] val `query-value-char` = uriParsingMode match {
+  //    case Uri.ParsingMode.Strict  => `strict-query-value-char`
+  //    case Uri.ParsingMode.Relaxed => `relaxed-query-value-char`
+  //  }
+  //  private[this] val `fragment-char` = uriParsingMode match {
+  //    case Uri.ParsingMode.Strict => `query-fragment-char`
+  //    case _                      => `relaxed-fragment-char`
+  //  }
+  //
+  //  // New vars need to be reset in `reset` below
+  //  private[this] var _scheme = ""
+  //  private[this] var _userinfo = ""
+  //  private[this] var _host: Host = Host.Empty
+  //  private[this] var _port: Int = 0
+  //  private[this] var _path: Path = Path.Empty
+  //  /**
+  //   *  Percent-encoded. When in 'relaxed' mode, characters not permitted by https://tools.ietf.org/html/rfc3986#section-3.4
+  //   *  are already automatically percent-encoded here
+  //   */
+  //  private[this] var _rawQueryString: Option[String] = None
+  //  private[this] var _fragment: Option[String] = None
+  //
+  //  /** Allows this parser to be reused. */
+  //  def reset(newInput: ParserInput): Unit = {
+  //    _input = newInput
+  //    _scheme = ""
+  //    _userinfo = ""
+  //    _host = Host.Empty
+  //    _port = 0
+  //    _path = Path.Empty
+  //    _rawQueryString = None
+  //    _fragment = None
+  //    _firstPercentIx = -1
+  //  }
+  //
+  //  private[this] def setScheme(scheme: String): Unit = _scheme = scheme
+  //  private[this] def setUserInfo(userinfo: String): Unit = _userinfo = userinfo
+  //  private[this] def setHost(host: Host): Unit = _host = host
+  //  private[this] def setPort(port: Int): Unit = _port = port
+  //  private[this] def setPath(path: Path): Unit = _path = path
+  //  private[this] def setRawQueryString(rawQueryString: String): Unit = _rawQueryString = Some(parseSafeRawQueryString(rawQueryString))
+  //  private[this] def setFragment(fragment: String): Unit = _fragment = Some(fragment)
+  //
+  //  // http://tools.ietf.org/html/rfc3986#appendix-A
+  //
+  //  def URI = rule { scheme ~ ':' ~ `hier-part` ~ optional('?' ~ rawQueryString) ~ optional('#' ~ fragment) }
+  //
+  //  def origin = rule { scheme ~ ':' ~ '/' ~ '/' ~ hostAndPort }
+  //
+  //  def `hier-part` = rule(
+  //    '/' ~ '/' ~ authority ~ `path-abempty`
+  //      | `path-absolute`
+  //      | `path-rootless`
+  //      | `path-empty`)
+  //
+  //  def `URI-reference` = rule { URI | `relative-ref` }
+  //
+  def `URI-reference-pushed`: Rule1[Uri] = ??? // rule { `URI-reference` ~ push(createUriReference()) }
+  //
+  //  def `absolute-URI` = rule { scheme ~ ':' ~ `hier-part` ~ optional('?' ~ rawQueryString) }
+  //
+  //  def `relative-ref` = rule { `relative-part` ~ optional('?' ~ rawQueryString) ~ optional('#' ~ fragment) }
+  //
+  //  def `relative-part` = rule(
+  //    '/' ~ '/' ~ authority ~ `path-abempty`
+  //      | `path-absolute`
+  //      | `path-noscheme`
+  //      | `path-empty`)
+  //
+  //  def scheme = rule(
+  //    'h' ~ 't' ~ 't' ~ 'p' ~ (&(':') ~ run(setScheme("http")) | 's' ~ &(':') ~ run(setScheme("https")))
+  //      | clearSB() ~ ALPHA ~ appendLowered() ~ zeroOrMore(`scheme-char` ~ appendLowered()) ~ &(':') ~ run(setScheme(sb.toString)))
+  //
+  //  def `scheme-pushed` = rule { oneOrMore(`scheme-char` ~ appendLowered()) ~ run(setScheme(sb.toString)) ~ push(_scheme) }
+  //
+  //  def authority = rule { optional(userinfo) ~ hostAndPort }
+  //
+  //  def userinfo = rule {
+  //    clearSBForDecoding() ~ zeroOrMore(`userinfo-char` ~ appendSB() | `pct-encoded`) ~ '@' ~ run(setUserInfo(getDecodedString()))
+  //  }
+  //
+  //  def hostAndPort = rule { host ~ optional(':' ~ port) }
+  //
+  //  def `hostAndPort-pushed` = rule { hostAndPort ~ push(_host) ~ push(_port) }
+  //
+  //  def host = rule { `IP-literal` | ipv4Host | `reg-name` }
+  //
+  //  /** A relaxed host rule to use in `parseHost` that also recognizes IPv6 address without the brackets. */
+  //  def relaxedHost = rule { `IP-literal` | ipv6Host | ipv4Host | `reg-name` }
+  //
+  //  def port = rule {
+  //    DIGIT ~ run(setPort(lastChar - '0')) ~ optional(
+  //      DIGIT ~ run(setPort(10 * _port + lastChar - '0')) ~ optional(
+  //        DIGIT ~ run(setPort(10 * _port + lastChar - '0')) ~ optional(
+  //          DIGIT ~ run(setPort(10 * _port + lastChar - '0')) ~ optional(
+  //            DIGIT ~ run(setPort(10 * _port + lastChar - '0'))))))
+  //  }
+  //
+  //  def `IP-literal` = rule { '[' ~ ipv6Host ~ ']' } // IPvFuture not currently recognized
+  //
+  //  def ipv4Host = rule { capture(`ip-v4-address`) ~ &(colonSlashEOI) ~> ((b, a) => _host = IPv4Host(b, a)) }
+  //  def ipv6Host = rule { capture(`ip-v6-address`) ~> ((b, a) => setHost(IPv6Host(b, a))) }
+  //
+  //  def `reg-name` = rule(
+  //    clearSBForDecoding() ~ oneOrMore(`lower-reg-name-char` ~ appendSB() | UPPER_ALPHA ~ appendLowered() | `pct-encoded`) ~
+  //      run(setHost(NamedHost(getDecodedStringAndLowerIfEncoded(UTF8))))
+  //      | run(setHost(Host.Empty)))
+  //
+  //  def `path-abempty` = rule { clearSB() ~ slashSegments ~ savePath() }
+  //  def `path-absolute` = rule { clearSB() ~ '/' ~ appendSB('/') ~ optional(`segment-nz` ~ slashSegments) ~ savePath() }
+  //  def `path-noscheme` = rule { clearSB() ~ `segment-nz-nc` ~ slashSegments ~ savePath() }
+  //  def `path-rootless` = rule { clearSB() ~ `segment-nz` ~ slashSegments ~ savePath() }
+  //  def `path-empty` = rule { MATCH }
+  //
+  //  def slashSegments = rule { zeroOrMore('/' ~ appendSB('/') ~ segment) }
+  //
+  //  def segment = rule { zeroOrMore(pchar) }
+  //  def `segment-nz` = rule { oneOrMore(pchar) }
+  //  def `segment-nz-nc` = rule { oneOrMore(!':' ~ pchar) }
+  //
+  //  def pchar = rule { `path-segment-char` ~ appendSB() | `pct-encoded` }
+  //
+  //  def rawQueryString = rule {
+  //    clearSB() ~ oneOrMore(`query-char` ~ appendSB() | `pct-encoded`) ~ run(setRawQueryString(sb.toString)) | run(setRawQueryString(""))
+  //  }
+
+  // https://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.1
+  def query: Rule1[Query] = ???
+
+  def fragment = ???
+
+  def `pct-encoded` = ???
+
+  //////////////////////////// ADDITIONAL HTTP-SPECIFIC RULES //////////////////////////
+
+  // http://tools.ietf.org/html/rfc7230#section-2.7
+  def `absolute-path` = ???
+
+  // http://tools.ietf.org/html/rfc7230#section-5.3
+  def `request-target` = ???
+
+  def parseHttpRequestTarget(): Uri = ???
+
+  /////////////////////////// ADDITIONAL HTTP/2-SPECIFIC RULES /////////////////////////
+
+  // https://tools.ietf.org/html/rfc7540#section-8.1.2.3
+  // https://tools.ietf.org/html/rfc3986#section-3.2 - without deprecated userinfo
+  def `http2-authority-pseudo-header` = ???
+
+  def parseHttp2AuthorityPseudoHeader(): Uri.Authority = ???
+
+  // https://tools.ietf.org/html/rfc7540#section-8.1.2.3
+  def `http2-path-pseudo-header` = ???
+
+  /**
+   * @return path and percent-encoded query string. When in 'relaxed' mode, characters not permitted by https://tools.ietf.org/html/rfc3986#section-3.4
+   *         are already automatically percent-encoded here
+   */
+  def parseHttp2PathPseudoHeader(): (Uri.Path, Option[String]) = ???
+
+  /** Allows this parser to be reused. */
+  def reset(newInput: ParserInput): Unit = ???
+}
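
To illustrate what parseSafeRawQueryString does in Relaxed mode (not part of the commit): a rough, hand-rolled approximation of the percent-encoding step. The allowed-character set below is a simplified stand-in for `query-fragment-char` ++ '%' from the real implementation:

    object RelaxedQueryDemo {
      // Simplified stand-in for the `query-fragment-char` predicate plus '%'.
      private val allowed: Set[Char] =
        (('a' to 'z') ++ ('A' to 'Z') ++ ('0' to '9')).toSet ++ "-._~!$&'()*+,;=:@/?%".toSet

      // Keep allowed characters, percent-encode everything else as UTF-8 bytes.
      def encodeRelaxed(rawQueryString: String): String =
        rawQueryString.flatMap { c =>
          if (allowed(c)) c.toString
          else c.toString.getBytes("UTF-8").map(b => f"%%${b & 0xFF}%02X").mkString
        }

      def main(args: Array[String]): Unit =
        println(encodeRelaxed("name=John Doe|admin")) // prints name=John%20Doe%7Cadmin
    }
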

