Posted to commits@griffin.apache.org by wa...@apache.org on 2022/01/20 06:52:58 UTC

[griffin] branch master updated: [GRIFFIN-367] For task GRIFFIN-367, update local deploy document. (#596)

This is an automated email from the ASF dual-hosted git repository.

wanghe pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/griffin.git


The following commit(s) were added to refs/heads/master by this push:
     new 1681e74  [GRIFFIN-367] For task GRIFFIN-367, update local deploy document. (#596)
1681e74 is described below

commit 1681e740ca733f6ad3f88e14dec123c8f3973a74
Author: Lipeng Zhu <li...@icloud.com>
AuthorDate: Thu Jan 20 14:52:53 2022 +0800

    [GRIFFIN-367] For task GRIFFIN-367, update local deploy document. (#596)
    
    * For task GRIFFIN-367, update local deploy document.
    
    * Update doc.
---
 griffin-doc/deploy/deploy-guide.md | 44 +++++++++++---------------------------
 1 file changed, 13 insertions(+), 31 deletions(-)

diff --git a/griffin-doc/deploy/deploy-guide.md b/griffin-doc/deploy/deploy-guide.md
index 7e54d54..79ec4a9 100644
--- a/griffin-doc/deploy/deploy-guide.md
+++ b/griffin-doc/deploy/deploy-guide.md
@@ -220,84 +220,66 @@ Here you can access http://127.0.0.1:8088/cluster/nodes to check hadoop nodes, y
 * **update configuration**
 Copy hive/conf/hive-site.xml.template to hive/conf/hive-site.xml and update some fields.
 ```xml
-+++ hive/conf/hive-site.xml	2018-12-16 11:17:51.000000000 +0800
-@@ -72,12 +72,12 @@
-   </property>
+<configuration>
    <property>
      <name>hive.exec.local.scratchdir</name>
--    <value>${system:java.io.tmpdir}/${system:user.name}</value>
-+    <value>/apache/tmp/hive</value>
+     <value>/apache/tmp/hive</value>
      <description>Local scratch space for Hive jobs</description>
    </property>
    <property>
      <name>hive.downloaded.resources.dir</name>
--    <value>${system:java.io.tmpdir}/${hive.session.id}_resources</value>
-+    <value>/apache/tmp/hive/${hive.session.id}_resources</value>
+     <value>/apache/tmp/hive/${hive.session.id}_resources</value>
      <description>Temporary local directory for added resources in the remote file system.</description>
    </property>
    <property>
-@@ -368,7 +368,7 @@
    </property>
    <property>
      <name>hive.metastore.uris</name>
--    <value/>
-+    <value>thrift://127.0.0.1:9083</value>
+     <value>thrift://127.0.0.1:9083</value>
      <description>Thrift URI for the remote metastore.</description>
    </property>
    <property>
-@@ -527,7 +527,7 @@
    </property>
    <property>
      <name>javax.jdo.option.ConnectionPassword</name>
--    <value>mine</value>
-+    <value>secret</value>
+     <value>secret</value>
      <description>password to use against metastore database</description>
    </property>
    <property>
-@@ -542,7 +542,7 @@
    </property>
    <property>
      <name>javax.jdo.option.ConnectionURL</name>
--    <value>jdbc:derby:;databaseName=metastore_db;create=true</value>
-+    <value>jdbc:postgresql://127.0.0.1/myDB?ssl=false</value>
+     <value>jdbc:postgresql://127.0.0.1/myDB?ssl=false</value>
      <description>
        JDBC connect string for a JDBC metastore.
        To use SSL to encrypt/authenticate the connection, provide database-specific SSL flag in the connection URL.
-@@ -1017,7 +1017,7 @@
    </property>
    <property>
      <name>javax.jdo.option.ConnectionDriverName</name>
--    <value>org.apache.derby.jdbc.EmbeddedDriver</value>
-+    <value>org.postgresql.Driver</value>
+     <value>org.postgresql.Driver</value>
      <description>Driver class name for a JDBC metastore</description>
    </property>
    <property>
-@@ -1042,7 +1042,7 @@
    </property>
    <property>
      <name>javax.jdo.option.ConnectionUserName</name>
--    <value>APP</value>
-+    <value>king</value>
+     <value>king</value>
      <description>Username to use against metastore database</description>
    </property>
    <property>
-@@ -1682,7 +1682,7 @@
    </property>
    <property>
      <name>hive.querylog.location</name>
--    <value>${system:java.io.tmpdir}/${system:user.name}</value>
-+    <value>/apache/tmp/hive</value>
+     <value>/apache/tmp/hive</value>
      <description>Location of Hive run time structured log file</description>
    </property>
    <property>
-@@ -3973,7 +3973,7 @@
    </property>
    <property>
      <name>hive.server2.logging.operation.log.location</name>
--    <value>${system:java.io.tmpdir}/${system:user.name}/operation_logs</value>
-+    <value>/apache/tmp/hive/operation_logs</value>
+     <value>/apache/tmp/hive/operation_logs</value>
    </property>
-   <property>
+</configuration>
 ```
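
For illustration, a minimal sketch of putting these settings in place (assuming a local Hive install under `hive/`, as the guide uses; paths are illustrative):
```
# Copy the template and edit the properties listed above
cp hive/conf/hive-site.xml.template hive/conf/hive-site.xml
# Create the local scratch directory referenced by hive.exec.local.scratchdir and hive.querylog.location
mkdir -p /apache/tmp/hive
```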
 
 * **start up hive metastore service**
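
A sketch of bringing up the metastore behind the `thrift://127.0.0.1:9083` URI configured above (this assumes the Hive binaries are on `PATH` and the PostgreSQL database from the JDBC settings already exists):
```
# One-time: initialize the metastore schema in PostgreSQL
schematool -dbType postgres -initSchema
# Start the metastore service; it listens on port 9083 by default
hive --service metastore &
```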
@@ -533,7 +515,7 @@ Similar changes are required in `env_streaming.json`.
 }
 ```
 
-It's easy to build Griffin, just run maven command `mvn clean install`. Successfully building, you can get `service-${version}.tar.gz` and `measure-${version}.jar` from target folder in service and measure module.
+Building Griffin is easy: just run the Maven command `mvn clean install`. After a successful build, you can find `service-${version}.tar.gz` in the root target folder and `measure-${version}.jar` in the measure module's target folder.
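
For example, from the repository root (artifact names depend on the release version; the paths below follow the description above):
```
# Build all modules
mvn clean install
# The service tarball sits in the root target folder, the measure jar in the measure module's target folder
ls target/service-*.tar.gz measure/target/measure-*.jar
```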
 
 Upload the measure jar to HDFS.
 ```
@@ -546,7 +528,7 @@ hdfs dfs -put griffin-measure.jar /griffin/
 Start up the Griffin management service.
 ```
 cd $GRIFFIN_INSTALL_DIR
-tar -zxvf service-${version}.tar.gz
+tar -zxvf target/service-${version}.tar.gz
 cd service-${version}
 # start service
 ./bin/griffin.sh start
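# Once started, the management service is assumed to listen on port 8080
# (check the service's application.properties if your port differs); a quick probe:
curl -s http://127.0.0.1:8080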