You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hudi.apache.org by si...@apache.org on 2023/01/24 23:41:38 UTC
[hudi] branch asf-site updated: [MINOR] Added backslashes and set the code type to shell (#7743)
This is an automated email from the ASF dual-hosted git repository.
sivabalan pushed a commit to branch asf-site
in repository https://gitbox.apache.org/repos/asf/hudi.git
The following commit(s) were added to refs/heads/asf-site by this push:
new f0afed91688 [MINOR] Added backslashes and set the code type to shell (#7743)
f0afed91688 is described below
commit f0afed916881a0f6dcbcb9f3eba86610d30314db
Author: Jon Vexler <jb...@gmail.com>
AuthorDate: Tue Jan 24 18:41:32 2023 -0500
[MINOR] Added backslashes and set the code type to shell (#7743)
Co-authored-by: Jonathan Vexler <=>
---
.../version-0.10.0/quick-start-guide.md | 28 ++++++++---------
.../version-0.10.1/quick-start-guide.md | 36 +++++++++++-----------
.../version-0.8.0/quick-start-guide.md | 22 ++++++-------
.../version-0.9.0/quick-start-guide.md | 22 ++++++-------
4 files changed, 54 insertions(+), 54 deletions(-)
diff --git a/website/versioned_docs/version-0.10.0/quick-start-guide.md b/website/versioned_docs/version-0.10.0/quick-start-guide.md
index f5392bd05bb..e98d34d9d3e 100644
--- a/website/versioned_docs/version-0.10.0/quick-start-guide.md
+++ b/website/versioned_docs/version-0.10.0/quick-start-guide.md
@@ -37,20 +37,20 @@ values={[
From the extracted directory run spark-shell with Hudi as:
-```scala
-// spark-shell for spark 3
+```shell
+# spark-shell for spark 3
spark-shell \
--packages org.apache.hudi:hudi-spark3-bundle_2.12:0.10.0,org.apache.spark:spark-avro_2.12:3.1.2 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
-// spark-shell for spark 2 with scala 2.12
+# spark-shell for spark 2 with scala 2.12
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.12:0.10.0,org.apache.spark:spark-avro_2.12:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
-// spark-shell for spark 2 with scala 2.11
+# spark-shell for spark 2 with scala 2.11
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.11:0.10.0,org.apache.spark:spark-avro_2.11:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
@@ -86,26 +86,26 @@ spark-sql \
From the extracted directory run pyspark with Hudi as:
-```python
+```shell
# pyspark
export PYSPARK_PYTHON=$(which python3)
# for spark3
-pyspark
---packages org.apache.hudi:hudi-spark3-bundle_2.12:0.10.0,org.apache.spark:spark-avro_2.12:3.1.2
---conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
+pyspark \
+--packages org.apache.hudi:hudi-spark3-bundle_2.12:0.10.0,org.apache.spark:spark-avro_2.12:3.1.2 \
+--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
# for spark2 with scala 2.12
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.12:0.10.0,org.apache.spark:spark-avro_2.12:2.4.4
---conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.12:0.10.0,org.apache.spark:spark-avro_2.12:2.4.4 \
+--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
# for spark2 with scala 2.11
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.11:0.10.0,org.apache.spark:spark-avro_2.11:2.4.4
---conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.11:0.10.0,org.apache.spark:spark-avro_2.11:2.4.4 \
+--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
```
diff --git a/website/versioned_docs/version-0.10.1/quick-start-guide.md b/website/versioned_docs/version-0.10.1/quick-start-guide.md
index 7254da0fc2b..e96400c9329 100644
--- a/website/versioned_docs/version-0.10.1/quick-start-guide.md
+++ b/website/versioned_docs/version-0.10.1/quick-start-guide.md
@@ -37,26 +37,26 @@ values={[
From the extracted directory run spark-shell with Hudi as:
-```scala
-// spark-shell for spark 3.1
+```shell
+# spark-shell for spark 3.1
spark-shell \
--packages org.apache.hudi:hudi-spark3.1.2-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:3.1.2 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
-// spark-shell for spark 3.2
+# spark-shell for spark 3.2
spark-shell \
--packages org.apache.hudi:hudi-spark3.0.3-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:3.0.3 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
-// spark-shell for spark 2 with scala 2.12
+# spark-shell for spark 2 with scala 2.12
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
-// spark-shell for spark 2 with scala 2.11
+# spark-shell for spark 2 with scala 2.11
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.11:0.10.1,org.apache.spark:spark-avro_2.11:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
@@ -97,32 +97,32 @@ spark-sql \
From the extracted directory run pyspark with Hudi as:
-```python
+```shell
# pyspark
export PYSPARK_PYTHON=$(which python3)
# for spark3.1
-pyspark
---packages org.apache.hudi:hudi-spark3.1.2-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:3.1.2
---conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
+pyspark \
+--packages org.apache.hudi:hudi-spark3.1.2-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:3.1.2 \
+--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
# for spark3.0
-pyspark
---packages org.apache.hudi:hudi-spark3.0.3-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:3.0.3
---conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
+pyspark \
+--packages org.apache.hudi:hudi-spark3.0.3-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:3.0.3 \
+--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
# for spark2 with scala 2.12
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:2.4.4
---conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.12:0.10.1,org.apache.spark:spark-avro_2.12:2.4.4 \
+--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
# for spark2 with scala 2.11
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.11:0.10.1,org.apache.spark:spark-avro_2.11:2.4.4
---conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.11:0.10.1,org.apache.spark:spark-avro_2.11:2.4.4 \
+--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer' \
--conf 'spark.sql.extensions=org.apache.spark.sql.hudi.HoodieSparkSessionExtension'
```
diff --git a/website/versioned_docs/version-0.8.0/quick-start-guide.md b/website/versioned_docs/version-0.8.0/quick-start-guide.md
index d7db345947f..3a7c7a24417 100644
--- a/website/versioned_docs/version-0.8.0/quick-start-guide.md
+++ b/website/versioned_docs/version-0.8.0/quick-start-guide.md
@@ -25,18 +25,18 @@ values={[
]}>
<TabItem value="scala">
-```scala
-// spark-shell for spark 3
+```shell
+# spark-shell for spark 3
spark-shell \
--packages org.apache.hudi:hudi-spark3-bundle_2.12:0.8.0,org.apache.spark:spark-avro_2.12:3.0.1 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
-// spark-shell for spark 2 with scala 2.12
+# spark-shell for spark 2 with scala 2.12
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.12:0.8.0,org.apache.spark:spark-avro_2.12:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
-// spark-shell for spark 2 with scala 2.11
+# spark-shell for spark 2 with scala 2.11
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.11:0.8.0,org.apache.spark:spark-avro_2.11:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
@@ -45,23 +45,23 @@ spark-shell \
</TabItem>
<TabItem value="python">
-```python
+```shell
# pyspark
export PYSPARK_PYTHON=$(which python3)
# for spark3
-pyspark
---packages org.apache.hudi:hudi-spark3-bundle_2.12:0.8.0,org.apache.spark:spark-avro_2.12:3.0.1
+pyspark \
+--packages org.apache.hudi:hudi-spark3-bundle_2.12:0.8.0,org.apache.spark:spark-avro_2.12:3.0.1 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
# for spark2 with scala 2.12
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.12:0.8.0,org.apache.spark:spark-avro_2.12:2.4.4
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.12:0.8.0,org.apache.spark:spark-avro_2.12:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
# for spark2 with scala 2.11
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.11:0.8.0,org.apache.spark:spark-avro_2.11:2.4.4
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.11:0.8.0,org.apache.spark:spark-avro_2.11:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
```
diff --git a/website/versioned_docs/version-0.9.0/quick-start-guide.md b/website/versioned_docs/version-0.9.0/quick-start-guide.md
index bb226555c4d..73dda9bc099 100644
--- a/website/versioned_docs/version-0.9.0/quick-start-guide.md
+++ b/website/versioned_docs/version-0.9.0/quick-start-guide.md
@@ -27,18 +27,18 @@ values={[
<TabItem value="scala">
From the extracted directory run spark-shell with Hudi as:
-```scala
-// spark-shell for spark 3
+```shell
+# spark-shell for spark 3
spark-shell \
--packages org.apache.hudi:hudi-spark3-bundle_2.12:0.9.0,org.apache.spark:spark-avro_2.12:3.0.1 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
-// spark-shell for spark 2 with scala 2.12
+# spark-shell for spark 2 with scala 2.12
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.12:0.9.0,org.apache.spark:spark-avro_2.12:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
-// spark-shell for spark 2 with scala 2.11
+# spark-shell for spark 2 with scala 2.11
spark-shell \
--packages org.apache.hudi:hudi-spark-bundle_2.11:0.9.0,org.apache.spark:spark-avro_2.11:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
@@ -71,23 +71,23 @@ spark-sql \
<TabItem value="python">
From the extracted directory run pyspark with Hudi as:
-```python
+```shell
# pyspark
export PYSPARK_PYTHON=$(which python3)
# for spark3
-pyspark
---packages org.apache.hudi:hudi-spark3-bundle_2.12:0.9.0,org.apache.spark:spark-avro_2.12:3.0.1
+pyspark \
+--packages org.apache.hudi:hudi-spark3-bundle_2.12:0.9.0,org.apache.spark:spark-avro_2.12:3.0.1 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
# for spark2 with scala 2.12
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.12:0.9.0,org.apache.spark:spark-avro_2.12:2.4.4
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.12:0.9.0,org.apache.spark:spark-avro_2.12:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
# for spark2 with scala 2.11
-pyspark
---packages org.apache.hudi:hudi-spark-bundle_2.11:0.9.0,org.apache.spark:spark-avro_2.11:2.4.4
+pyspark \
+--packages org.apache.hudi:hudi-spark-bundle_2.11:0.9.0,org.apache.spark:spark-avro_2.11:2.4.4 \
--conf 'spark.serializer=org.apache.spark.serializer.KryoSerializer'
```