Posted to commits@bigtop.apache.org by yw...@apache.org on 2016/12/14 07:48:27 UTC

bigtop git commit: BIGTOP-2605: Addendum for BIGTOP-2514

Repository: bigtop
Updated Branches:
  refs/heads/master 3466b5f6e -> 5bee2caee


BIGTOP-2605: Addendum for BIGTOP-2514


Project: http://git-wip-us.apache.org/repos/asf/bigtop/repo
Commit: http://git-wip-us.apache.org/repos/asf/bigtop/commit/5bee2cae
Tree: http://git-wip-us.apache.org/repos/asf/bigtop/tree/5bee2cae
Diff: http://git-wip-us.apache.org/repos/asf/bigtop/diff/5bee2cae

Branch: refs/heads/master
Commit: 5bee2caeea2a13ff58fb08a04dceb358864825bc
Parents: 3466b5f
Author: Youngwoo Kim <yw...@apache.org>
Authored: Thu Nov 24 11:01:39 2016 +0900
Committer: Youngwoo Kim <yw...@apache.org>
Committed: Wed Dec 14 16:47:06 2016 +0900

----------------------------------------------------------------------
 .../src/common/zeppelin/install_zeppelin.sh     |  2 +
 .../src/common/zeppelin/zeppelin-env.sh         | 72 +++++++++++++++++++-
 2 files changed, 71 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/bigtop/blob/5bee2cae/bigtop-packages/src/common/zeppelin/install_zeppelin.sh
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/zeppelin/install_zeppelin.sh b/bigtop-packages/src/common/zeppelin/install_zeppelin.sh
index 29e3e30..0e305c6 100644
--- a/bigtop-packages/src/common/zeppelin/install_zeppelin.sh
+++ b/bigtop-packages/src/common/zeppelin/install_zeppelin.sh
@@ -122,10 +122,12 @@ tar --wildcards --strip-components=1 -C $PREFIX/$LIB_DIR -zxf ${BUILD_DIR}/zeppe
 tar --wildcards --strip-components=1 -C $PREFIX/$LIB_DIR -zxf ${BUILD_DIR}/zeppelin-distribution/target/zeppelin-*.tar.gz \*zeppelin-web\*.war
 tar --wildcards --strip-components=1 -C $PREFIX/var/lib/zeppelin -zxf ${BUILD_DIR}/zeppelin-distribution/target/zeppelin-*.tar.gz \*/notebook/\*
 
+rm -f $PREFIX/$LIB_DIR/bin/*.cmd
 chmod 755 $PREFIX/$LIB_DIR/bin/*
 
 cp -a ${BUILD_DIR}/{LICENSE,README.md} $PREFIX/$LIB_DIR
 
 cp -a ${BUILD_DIR}/conf/* $PREFIX/$CONF_DIR
+rm -f $PREFIX/$CONF_DIR/*.cmd.*
 cp -a ${SOURCE_DIR}/zeppelin-env.sh $PREFIX/$CONF_DIR
 ln -s /etc/zeppelin/conf $PREFIX/$LIB_DIR/conf
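
The two rm calls added above strip the Windows-only launcher scripts (bin/*.cmd) and the *.cmd.* config templates from the Linux package tree. A minimal packaging check along the same lines, assuming the same $PREFIX/$LIB_DIR and $CONF_DIR staging layout used by install_zeppelin.sh (a sketch, not part of this commit):

    # Hypothetical post-install check: fail the build if any Windows .cmd
    # artifact survives the rm calls above.
    if find "$PREFIX/$LIB_DIR/bin" "$PREFIX/$CONF_DIR" -name '*.cmd*' 2>/dev/null | grep -q .; then
      echo "ERROR: leftover Windows .cmd files in the staged Zeppelin package" >&2
      exit 1
    fi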

http://git-wip-us.apache.org/repos/asf/bigtop/blob/5bee2cae/bigtop-packages/src/common/zeppelin/zeppelin-env.sh
----------------------------------------------------------------------
diff --git a/bigtop-packages/src/common/zeppelin/zeppelin-env.sh b/bigtop-packages/src/common/zeppelin/zeppelin-env.sh
index 038d13e..abc5f33 100644
--- a/bigtop-packages/src/common/zeppelin/zeppelin-env.sh
+++ b/bigtop-packages/src/common/zeppelin/zeppelin-env.sh
@@ -13,13 +13,79 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-export ZEPPELIN_INTERPRETERS="org.apache.zeppelin.spark.SparkInterpreter,org.apache.zeppelin.spark.PySparkInterpreter,org.apache.zeppelin.spark.SparkSqlInterpreter,org.apache.zeppelin.spark.DepInterpreter,org.apache.zeppelin.markdown.Markdown,org.apache.zeppelin.angular.AngularInterpreter,org.apache.zeppelin.shell.ShellInterpreter,org.apache.zeppelin.hive.HiveInterpreter"
-export ZEPPELIN_PORT=8080
+# export JAVA_HOME=
+# export MASTER=                 		# Spark master URL, e.g. spark://master_addr:7077. Leave empty to use local mode.
+# export ZEPPELIN_JAVA_OPTS      		# Additional JVM options. For example, export ZEPPELIN_JAVA_OPTS="-Dspark.executor.memory=8g -Dspark.cores.max=16"
+# export ZEPPELIN_MEM            		# Zeppelin JVM memory options. Default: -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
+# export ZEPPELIN_INTP_MEM       		# Zeppelin interpreter process JVM memory options. Default: -Xms1024m -Xmx1024m -XX:MaxPermSize=512m
+# export ZEPPELIN_INTP_JAVA_OPTS 		# Zeppelin interpreter process JVM options.
+
+# export ZEPPELIN_LOG_DIR        		# Where log files are stored.  PWD by default.
+# export ZEPPELIN_PID_DIR        		# Where pid files are stored. ${ZEPPELIN_HOME}/run by default.
+# export ZEPPELIN_WAR_TEMPDIR    		# The location of the Jetty temporary directory.
+# export ZEPPELIN_NOTEBOOK_DIR   		# Where notebooks are saved
+# export ZEPPELIN_NOTEBOOK_HOMESCREEN		# Id of the notebook to display on the homescreen, e.g. 2A94M5J1Z
+# export ZEPPELIN_NOTEBOOK_HOMESCREEN_HIDE	# Hide the homescreen notebook from the list when set to "true". Default: "false"
+# export ZEPPELIN_NOTEBOOK_S3_BUCKET        # Bucket where notebooks are saved
+# export ZEPPELIN_NOTEBOOK_S3_ENDPOINT      # Endpoint of the bucket
+# export ZEPPELIN_NOTEBOOK_S3_USER          # User in the bucket where notebooks are saved, e.g. bucket/user/notebook/2A94M5J1Z/note.json
+# export ZEPPELIN_IDENT_STRING   		# A string representing this instance of zeppelin. $USER by default.
+# export ZEPPELIN_NICENESS       		# The scheduling priority for daemons. Defaults to 0.
+# export ZEPPELIN_INTERPRETER_LOCALREPO         # Local repository for interpreter's additional dependency loading
+# export ZEPPELIN_NOTEBOOK_STORAGE 		# Refers to a pluggable notebook storage class; two classes can be used simultaneously with a sync between them (e.g. local and remote).
+
+#### Spark interpreter configuration ####
+
+## Use a provided Spark installation ##
+## Defining SPARK_HOME makes Zeppelin run the Spark interpreter process using spark-submit
+##
+# export SPARK_HOME                             # (required) When defined, it is used instead of Zeppelin's embedded Spark libraries
+# export SPARK_SUBMIT_OPTIONS                   # (optional) Extra options to pass to spark-submit, e.g. "--driver-memory 512M --executor-memory 1G".
+# export SPARK_APP_NAME                         # (optional) The name of the Spark application.
+
+## Use embedded Spark binaries ##
+## Without SPARK_HOME defined, Zeppelin can still run the Spark interpreter process using the embedded Spark binaries.
+## However, this is not encouraged when you can define SPARK_HOME
+##
+# Options read in YARN client mode
+# export HADOOP_CONF_DIR         		# yarn-site.xml must be located in the configuration directory pointed to by HADOOP_CONF_DIR.
+# PySpark (supported with Spark 1.2.1 and above)
+# To configure PySpark, set the Spark distribution's path in the 'spark.home' property on the Interpreter settings screen in the Zeppelin GUI
+# export PYSPARK_PYTHON          		# Path to the python command. Must be the same path on the driver (Zeppelin) and all workers.
+# export PYTHONPATH
+
+## Spark interpreter options ##
+##
+# export ZEPPELIN_SPARK_USEHIVECONTEXT  # Use HiveContext instead of SQLContext if set to true. true by default.
+# export ZEPPELIN_SPARK_CONCURRENTSQL   # Execute multiple SQL statements concurrently if set to true. false by default.
+# export ZEPPELIN_SPARK_IMPORTIMPLICIT  # Import implicits, UDF collection, and sql if set to true. true by default.
+# export ZEPPELIN_SPARK_MAXRESULT       # Max number of SparkSQL results to display. 1000 by default.
+# export ZEPPELIN_WEBSOCKET_MAX_TEXT_MESSAGE_SIZE       # Size in characters of the maximum text message to be received by websocket. Defaults to 1024000
+
+
+#### HBase interpreter configuration ####
+
+## To connect to HBase running on a cluster, either HBASE_HOME or HBASE_CONF_DIR must be set
+
+# export HBASE_HOME=                    # (required) Directory under which the HBase scripts and configuration are located
+# export HBASE_CONF_DIR=                # (optional) Alternatively, the configuration directory can point to the directory that contains hbase-site.xml
+
+#### ZeppelinHub connection configuration ####
+# export ZEPPELINHUB_API_ADDRESS		# Refers to the address of the ZeppelinHub service in use
+# export ZEPPELINHUB_API_TOKEN			# Refers to the Zeppelin instance token of the user
+# export ZEPPELINHUB_USER_KEY			# Optional, when using Zeppelin with authentication.
+
+export MASTER=yarn-client
+
 export ZEPPELIN_CONF_DIR=/etc/zeppelin/conf
 export ZEPPELIN_LOG_DIR=/var/log/zeppelin
 export ZEPPELIN_PID_DIR=/var/run/zeppelin
 export ZEPPELIN_WAR_TEMPDIR=/var/run/zeppelin/webapps
 export ZEPPELIN_NOTEBOOK_DIR=/var/lib/zeppelin/notebook
-export MASTER=yarn-client
+
 export SPARK_HOME=/usr/lib/spark
+
 export HADOOP_CONF_DIR=/etc/hadoop/conf
+
+export HBASE_HOME=/usr/lib/hbase
+export HBASE_CONF_DIR=/etc/hbase/conf
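
With these defaults the packaged zeppelin-env.sh points Zeppelin at the Bigtop-provided Spark, Hadoop, and HBase installs and submits Spark jobs in yarn-client mode. A quick smoke test of the shipped values, assuming Bigtop's standard install paths and Zeppelin's stock zeppelin-daemon.sh launcher (a sketch, not part of this commit):

    # Hypothetical sanity check: source the shipped environment file and warn
    # about any referenced directory that is missing on this host.
    . /etc/zeppelin/conf/zeppelin-env.sh
    for dir in "$SPARK_HOME" "$HADOOP_CONF_DIR" "$HBASE_HOME" "$HBASE_CONF_DIR"; do
      [ -d "$dir" ] || echo "WARN: $dir does not exist on this host" >&2
    done
    # Start the daemon once the environment looks sane.
    /usr/lib/zeppelin/bin/zeppelin-daemon.sh start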