Posted to commits@eagle.apache.org by mw...@apache.org on 2016/07/28 01:57:17 UTC

incubator-eagle git commit: fix typo in files

Repository: incubator-eagle
Updated Branches:
  refs/heads/master 9d9c30cc8 -> a6ac616e4


fix typo in files


Project: http://git-wip-us.apache.org/repos/asf/incubator-eagle/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-eagle/commit/a6ac616e
Tree: http://git-wip-us.apache.org/repos/asf/incubator-eagle/tree/a6ac616e
Diff: http://git-wip-us.apache.org/repos/asf/incubator-eagle/diff/a6ac616e

Branch: refs/heads/master
Commit: a6ac616e40d8cfec817d30bb7097277fb29ed84f
Parents: 9d9c30c
Author: anyway1021 <mw...@apache.org>
Authored: Thu Jul 28 09:56:43 2016 +0800
Committer: anyway1021 <mw...@apache.org>
Committed: Thu Jul 28 09:56:43 2016 +0800

----------------------------------------------------------------------
 .../eagle-stream-pipeline/src/test/resources/pipeline_3.conf   | 2 +-
 eagle-docs/tutorial/application_manager_tutorial.md            | 2 +-
 .../security/hdfs/test/HDFSResourceAccessConfigTest.java       | 6 +++---
 3 files changed, 5 insertions(+), 5 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/a6ac616e/eagle-core/eagle-data-process/eagle-stream-pipeline/src/test/resources/pipeline_3.conf
----------------------------------------------------------------------
diff --git a/eagle-core/eagle-data-process/eagle-stream-pipeline/src/test/resources/pipeline_3.conf b/eagle-core/eagle-data-process/eagle-stream-pipeline/src/test/resources/pipeline_3.conf
index b1c1955..9dc7ce3 100644
--- a/eagle-core/eagle-data-process/eagle-stream-pipeline/src/test/resources/pipeline_3.conf
+++ b/eagle-core/eagle-data-process/eagle-stream-pipeline/src/test/resources/pipeline_3.conf
@@ -34,7 +34,7 @@
 			"site" : "sandbox"
 			"application": "HADOOP"
 			"dataJoinPollIntervalSec" : 30
-			"mailHost" : "atom.corp.ebay.com"
+			"mailHost" : "some.mail.server"
 			"mailSmtpPort":"25"
 			"mailDebug" : "true"
 			"eagleService": {

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/a6ac616e/eagle-docs/tutorial/application_manager_tutorial.md
----------------------------------------------------------------------
diff --git a/eagle-docs/tutorial/application_manager_tutorial.md b/eagle-docs/tutorial/application_manager_tutorial.md
index 0e342c5..d2a5eba 100644
--- a/eagle-docs/tutorial/application_manager_tutorial.md
+++ b/eagle-docs/tutorial/application_manager_tutorial.md
@@ -95,7 +95,7 @@ Note: these configurations can be overridden in the topology configurations, whi
            app.eagleProps.site=sandbox
            app.eagleProps.application=hbaseSecurityLog
            app.eagleProps.dataJoinPollIntervalSec=30
-           app.eagleProps.mailHost=atom.corp.ebay.com
+           app.eagleProps.mailHost=some.mail.server
            app.eagleProps.mailSmtpPort=25
            app.eagleProps.mailDebug=true
            app.eagleProps.eagleService.host=localhost
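
[Illustrative note, not part of the commit] the tutorial lines above are flat key/value properties; a minimal sketch of how such keys resolve with java.util.Properties follows. The inlined text mirrors the changed lines; the class name EaglePropsSketch is hypothetical and nothing here is taken from the application manager code itself.

    // Minimal sketch: load the app.eagleProps.* keys shown above and read the mail host.
    import java.io.StringReader;
    import java.util.Properties;

    public class EaglePropsSketch {                          // hypothetical class name
        public static void main(String[] args) throws Exception {
            String text =
                      "app.eagleProps.mailHost=some.mail.server\n"
                    + "app.eagleProps.mailSmtpPort=25\n"
                    + "app.eagleProps.mailDebug=true\n";
            Properties props = new Properties();
            props.load(new StringReader(text));
            // Keys are matched verbatim, including the "app.eagleProps." prefix.
            System.out.println(props.getProperty("app.eagleProps.mailHost"));
        }
    }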

http://git-wip-us.apache.org/repos/asf/incubator-eagle/blob/a6ac616e/eagle-security/eagle-security-hdfs-web/src/test/java/org/apache/eagle/service/security/hdfs/test/HDFSResourceAccessConfigTest.java
----------------------------------------------------------------------
diff --git a/eagle-security/eagle-security-hdfs-web/src/test/java/org/apache/eagle/service/security/hdfs/test/HDFSResourceAccessConfigTest.java b/eagle-security/eagle-security-hdfs-web/src/test/java/org/apache/eagle/service/security/hdfs/test/HDFSResourceAccessConfigTest.java
index f046d0a..a5cfa03 100644
--- a/eagle-security/eagle-security-hdfs-web/src/test/java/org/apache/eagle/service/security/hdfs/test/HDFSResourceAccessConfigTest.java
+++ b/eagle-security/eagle-security-hdfs-web/src/test/java/org/apache/eagle/service/security/hdfs/test/HDFSResourceAccessConfigTest.java
@@ -26,15 +26,15 @@ public class HDFSResourceAccessConfigTest {
 
 	@Test
 	public void testHDFSResourceAccessConfig() throws Exception {
-		String configStr = "{\"fs.defaultFS\":\"hdfs://sandbox-nn-ha\",\"dfs.nameservices\":\"sandbox-nn-ha\",\"dfs.ha.namenodes.sandbox-nn-ha\":\"nn1,nn2\",\"dfs.namenode.rpc-address.sandbox-nn-ha.nn1\":\"sandbox-nn.vip.ebay.com:8020\",\"dfs.namenode.rpc-address.sandbox-nn-ha.nn2\":\"sandbox-nn-2.vip.ebay.com:8020\",\"dfs.client.failover.proxy.provider.sandbox-nn-ha\":\"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\",\"hadoop.security.authentication\":\"kerberos\",\"dfs.namenode.kerberos.principal\":\"hadoop/_HOST@EXAMPLE.COM\"}";
+		String configStr = "{\"fs.defaultFS\":\"hdfs://sandbox-nn-ha\",\"dfs.nameservices\":\"sandbox-nn-ha\",\"dfs.ha.namenodes.sandbox-nn-ha\":\"nn1,nn2\",\"dfs.namenode.rpc-address.sandbox-nn-ha.nn1\":\"sandbox-nn.some.server.com:8020\",\"dfs.namenode.rpc-address.sandbox-nn-ha.nn2\":\"sandbox-nn-2.some.server.com:8020\",\"dfs.client.failover.proxy.provider.sandbox-nn-ha\":\"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\",\"hadoop.security.authentication\":\"kerberos\",\"dfs.namenode.kerberos.principal\":\"hadoop/_HOST@EXAMPLE.COM\"}";
 		ObjectMapper mapper = new ObjectMapper();
 		Map<String, String> configMap = mapper.readValue(configStr, Map.class);
 		Map<String, String> result = new HashMap<>();
 		result.put("fs.defaultFS" , "hdfs://sandbox-nn-ha");
 		result.put("dfs.nameservices", "sandbox-nn-ha");
 		result.put("dfs.ha.namenodes.sandbox-nn-ha", "nn1,nn2");
-		result.put("dfs.namenode.rpc-address.sandbox-nn-ha.nn1", "sandbox-nn.vip.ebay.com:8020");
-		result.put("dfs.namenode.rpc-address.sandbox-nn-ha.nn2", "sandbox-nn-2.vip.ebay.com:8020");
+		result.put("dfs.namenode.rpc-address.sandbox-nn-ha.nn1", "sandbox-nn.some.server.com:8020");
+		result.put("dfs.namenode.rpc-address.sandbox-nn-ha.nn2", "sandbox-nn-2.some.server.com:8020");
 		result.put("dfs.client.failover.proxy.provider.sandbox-nn-ha","org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
 		result.put("hadoop.security.authentication", "kerberos");
 		result.put("dfs.namenode.kerberos.principal", "hadoop/_HOST@EXAMPLE.COM");