You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/11/03 09:02:21 UTC
svn commit: r1196986 [1/2] - in
/hadoop/common/trunk/hadoop-mapreduce-project: ./
hadoop-mapreduce-client/hadoop-mapreduce-client-core/
hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/
hadoop-mapredu...
Author: vinodkv
Date: Thu Nov 3 08:02:19 2011
New Revision: 1196986
URL: http://svn.apache.org/viewvc?rev=1196986&view=rev
Log:
MAPREDUCE-3297. Moved log related components into yarn-common so that HistoryServer and clients can use them without depending on the yarn-server-nodemanager module. Contributed by Siddharth Seth.
Added:
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/ContainerLogsRetentionPolicy.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnWebParams.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsNavBlock.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsPage.java
Removed:
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AggregatedLogFormat.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/ContainerLogsRetentionPolicy.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogDumper.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AggregatedLogsNavBlock.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebParams.java
Modified:
hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsLogsPage.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/event/LogHandlerAppStartedEvent.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AggregatedLogsBlock.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AggregatedLogsPage.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMController.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NavBlock.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/TestLogAggregationService.java
hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/TestNonAggregatingLogHandler.java
Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Thu Nov 3 08:02:19 2011
@@ -50,6 +50,22 @@ Trunk (unreleased changes)
MAPREDUCE-3014. Rename and invert logic of '-cbuild' profile to 'native' and off
by default. (tucu)
+Release 0.23.1 - Unreleased
+
+ INCOMPATIBLE CHANGES
+
+ NEW FEATURES
+
+ IMPROVEMENTS
+
+ MAPREDUCE-3297. Moved log related components into yarn-common so that
+ HistoryServer and clients can use them without depending on the
+ yarn-server-nodemanager module. (Siddharth Seth via vinodkv)
+
+ OPTIMIZATIONS
+
+ BUG FIXES
+
Release 0.23.0 - 2011-11-01
INCOMPATIBLE CHANGES
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/pom.xml Thu Nov 3 08:02:19 2011
@@ -36,10 +36,6 @@
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
- <artifactId>hadoop-yarn-server-nodemanager</artifactId>
- </dependency>
- <dependency>
- <groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-hdfs</artifactId>
</dependency>
</dependencies>
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Thu Nov 3 08:02:19 2011
@@ -46,7 +46,7 @@ import org.apache.hadoop.mapreduce.v2.Lo
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.LogDumper;
+import org.apache.hadoop.yarn.logaggregation.LogDumper;
/**
* Interprets the map reduce cli options
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsController.java Thu Nov 3 08:02:19 2011
@@ -23,8 +23,8 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
import org.apache.hadoop.mapreduce.v2.app.webapp.AppController;
-import org.apache.hadoop.yarn.server.nodemanager.webapp.AggregatedLogsPage;
import org.apache.hadoop.yarn.webapp.View;
+import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
import com.google.inject.Inject;
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsLogsPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsLogsPage.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsLogsPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsLogsPage.java Thu Nov 3 08:02:19 2011
@@ -17,11 +17,11 @@
*/
package org.apache.hadoop.mapreduce.v2.hs.webapp;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
-import org.apache.hadoop.yarn.server.nodemanager.webapp.AggregatedLogsBlock;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.log.AggregatedLogsBlock;
public class HsLogsPage extends HsView {
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java Thu Nov 3 08:02:19 2011
@@ -18,11 +18,11 @@
package org.apache.hadoop.mapreduce.v2.hs.webapp;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.NM_NODENAME;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.APP_OWNER;
import static org.apache.hadoop.yarn.util.StringHelper.pajoin;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_OWNER;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.NM_NODENAME;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/webapp/TestHSWebApp.java Thu Nov 3 08:02:19 2011
@@ -22,10 +22,10 @@ import static org.apache.hadoop.mapreduc
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.ATTEMPT_STATE;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.CONTAINER_ID;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.NM_NODENAME;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.ENTITY_STRING;
-import static org.apache.hadoop.yarn.server.nodemanager.webapp.NMWebParams.APP_OWNER;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_OWNER;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.NM_NODENAME;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
@@ -44,8 +44,8 @@ import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.event.EventHandler;
-import org.apache.hadoop.yarn.server.nodemanager.webapp.AggregatedLogsPage;
import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.webapp.log.AggregatedLogsPage;
import org.apache.hadoop.yarn.webapp.test.WebAppTests;
import org.junit.Test;
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/dev-support/findbugs-exclude.xml Thu Nov 3 08:02:19 2011
@@ -208,7 +208,7 @@
<!-- Ignore EI_EXPOSE_REP2 in Log services -->
<Match>
- <Class name="org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat$LogValue" />
+ <Class name="org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat$LogValue" />
<Bug pattern="EI_EXPOSE_REP2" />
</Match>
<Match>
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/AggregatedLogFormat.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,482 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.logaggregation;
+
+import java.io.DataInput;
+import java.io.DataInputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.io.EOFException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.io.Writer;
+import java.security.PrivilegedExceptionAction;
+import java.util.EnumSet;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.commons.io.input.BoundedInputStream;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CreateFlag;
+import org.apache.hadoop.fs.FSDataInputStream;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Options;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.file.tfile.TFile;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+
+/**
+ * On-disk format for aggregated container logs.
+ *
+ * Logs for all containers of one application (on one node) are written into a
+ * single TFile as (LogKey, LogValue) records. A few reserved keys
+ * (VERSION, APPLICATION_OWNER, APPLICATION_ACL) carry per-file metadata; every
+ * other key is a container-id string whose value holds that container's log
+ * files, each serialized as [fileName UTF][fileLength-as-string UTF][raw bytes].
+ */
+public class AggregatedLogFormat {
+
+  static final Log LOG = LogFactory.getLog(AggregatedLogFormat.class);
+  // Reserved META keys. LogReader#next() skips these so callers only ever see
+  // real per-container entries.
+  private static final LogKey APPLICATION_ACL_KEY = new LogKey("APPLICATION_ACL");
+  private static final LogKey APPLICATION_OWNER_KEY = new LogKey("APPLICATION_OWNER");
+  private static final LogKey VERSION_KEY = new LogKey("VERSION");
+  private static final Map<String, LogKey> RESERVED_KEYS;
+  //Maybe write out the retention policy.
+  //Maybe write out a list of containerLogs skipped by the retention policy.
+  private static final int VERSION = 1;
+
+  static {
+    RESERVED_KEYS = new HashMap<String, AggregatedLogFormat.LogKey>();
+    RESERVED_KEYS.put(APPLICATION_ACL_KEY.toString(), APPLICATION_ACL_KEY);
+    RESERVED_KEYS.put(APPLICATION_OWNER_KEY.toString(), APPLICATION_OWNER_KEY);
+    RESERVED_KEYS.put(VERSION_KEY.toString(), VERSION_KEY);
+  }
+
+  /**
+   * Key of a record in the aggregated-log TFile: either a container id or one
+   * of the reserved META key strings. Serialized as a single writeUTF string.
+   */
+  public static class LogKey implements Writable {
+
+    private String keyString;
+
+    public LogKey() {
+
+    }
+
+    public LogKey(ContainerId containerId) {
+      this.keyString = containerId.toString();
+    }
+
+    public LogKey(String keyString) {
+      this.keyString = keyString;
+    }
+
+    @Override
+    public void write(DataOutput out) throws IOException {
+      out.writeUTF(this.keyString);
+    }
+
+    @Override
+    public void readFields(DataInput in) throws IOException {
+      this.keyString = in.readUTF();
+    }
+
+    @Override
+    public String toString() {
+      return this.keyString;
+    }
+  }
+
+  /**
+   * Value of a per-container record. When written, walks every configured
+   * root log dir for this container and streams each log file found there.
+   */
+  public static class LogValue {
+
+    private final String[] rootLogDirs;
+    private final ContainerId containerId;
+    // TODO Maybe add a version string here. Instead of changing the version of
+    // the entire k-v format
+
+    public LogValue(String[] rootLogDirs, ContainerId containerId) {
+      this.rootLogDirs = rootLogDirs;
+      this.containerId = containerId;
+    }
+
+    /**
+     * Serializes all of this container's log files (from every root log dir)
+     * to the given stream as [fileName][length-string][bytes] triples.
+     * Directories that no longer exist are silently skipped.
+     */
+    public void write(DataOutputStream out) throws IOException {
+      for (String rootLogDir : this.rootLogDirs) {
+        File appLogDir =
+            new File(rootLogDir,
+                ConverterUtils.toString(
+                    this.containerId.getApplicationAttemptId().
+                        getApplicationId())
+                );
+        File containerLogDir =
+            new File(appLogDir, ConverterUtils.toString(this.containerId));
+
+        if (!containerLogDir.isDirectory()) {
+          continue; // ContainerDir may have been deleted by the user.
+        }
+
+        for (File logFile : containerLogDir.listFiles()) {
+
+          // Write the logFile Type
+          out.writeUTF(logFile.getName());
+
+          // Write the log length as UTF so that it is printable
+          out.writeUTF(String.valueOf(logFile.length()));
+
+          // Write the log itself
+          FileInputStream in = null;
+          try {
+            in = new FileInputStream(logFile);
+            byte[] buf = new byte[65535];
+            int len = 0;
+            while ((len = in.read(buf)) != -1) {
+              out.write(buf, 0, len);
+            }
+          } finally {
+            // The FileInputStream constructor may have thrown, leaving 'in'
+            // null; an unguarded close() would then mask the original
+            // exception with an NPE.
+            if (in != null) {
+              in.close();
+            }
+          }
+        }
+      }
+    }
+  }
+
+  /**
+   * Writes an aggregated-log TFile to the given remote path, running the
+   * file creation as the supplied user. The version record is written
+   * immediately on construction.
+   */
+  public static class LogWriter {
+
+    private final FSDataOutputStream fsDataOStream;
+    private final TFile.Writer writer;
+
+    public LogWriter(final Configuration conf, final Path remoteAppLogFile,
+        UserGroupInformation userUgi) throws IOException {
+      try {
+        this.fsDataOStream =
+            userUgi.doAs(new PrivilegedExceptionAction<FSDataOutputStream>() {
+              @Override
+              public FSDataOutputStream run() throws Exception {
+                return FileContext.getFileContext(conf).create(
+                    remoteAppLogFile,
+                    EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE),
+                    new Options.CreateOpts[] {});
+              }
+            });
+      } catch (InterruptedException e) {
+        throw new IOException(e);
+      }
+
+      // Keys are not sorted: null arg
+      // 256KB minBlockSize : Expected log size for each container too
+      this.writer =
+          new TFile.Writer(this.fsDataOStream, 256 * 1024, conf.get(
+              YarnConfiguration.NM_LOG_AGG_COMPRESSION_TYPE,
+              YarnConfiguration.DEFAULT_NM_LOG_AGG_COMPRESSION_TYPE), null, conf);
+      //Write the version string
+      writeVersion();
+    }
+
+    /** Writes the reserved VERSION record and flushes it to the filesystem. */
+    private void writeVersion() throws IOException {
+      DataOutputStream out = this.writer.prepareAppendKey(-1);
+      VERSION_KEY.write(out);
+      out.close();
+      out = this.writer.prepareAppendValue(-1);
+      out.writeInt(VERSION);
+      out.close();
+      this.fsDataOStream.hflush();
+    }
+
+    /** Writes the reserved APPLICATION_OWNER record. */
+    public void writeApplicationOwner(String user) throws IOException {
+      DataOutputStream out = this.writer.prepareAppendKey(-1);
+      APPLICATION_OWNER_KEY.write(out);
+      out.close();
+      out = this.writer.prepareAppendValue(-1);
+      out.writeUTF(user);
+      out.close();
+    }
+
+    /**
+     * Writes the reserved APPLICATION_ACL record: alternating
+     * access-type / ACL-string UTF pairs.
+     */
+    public void writeApplicationACLs(Map<ApplicationAccessType, String> appAcls)
+        throws IOException {
+      DataOutputStream out = this.writer.prepareAppendKey(-1);
+      APPLICATION_ACL_KEY.write(out);
+      out.close();
+      out = this.writer.prepareAppendValue(-1);
+      for (Entry<ApplicationAccessType, String> entry : appAcls.entrySet()) {
+        out.writeUTF(entry.getKey().toString());
+        out.writeUTF(entry.getValue());
+      }
+      out.close();
+    }
+
+    /** Appends one container's logs as a (key, value) record and flushes. */
+    public void append(LogKey logKey, LogValue logValue) throws IOException {
+      DataOutputStream out = this.writer.prepareAppendKey(-1);
+      logKey.write(out);
+      out.close();
+      out = this.writer.prepareAppendValue(-1);
+      logValue.write(out);
+      out.close();
+      this.fsDataOStream.hflush();
+    }
+
+    /**
+     * Closes the TFile writer and the underlying stream. Best-effort: close
+     * failures are logged, not propagated, so both closes always run.
+     */
+    public void closeWriter() {
+      try {
+        this.writer.close();
+      } catch (IOException e) {
+        LOG.warn("Exception closing writer", e);
+      }
+      try {
+        this.fsDataOStream.close();
+      } catch (IOException e) {
+        LOG.warn("Exception closing output-stream", e);
+      }
+    }
+  }
+
+  /**
+   * Reads an aggregated-log TFile written by {@link LogWriter}.
+   */
+  public static class LogReader {
+
+    private final FSDataInputStream fsDataIStream;
+    private final TFile.Reader.Scanner scanner;
+    private final TFile.Reader reader;
+
+    public LogReader(Configuration conf, Path remoteAppLogFile)
+        throws IOException {
+      FileContext fileContext = FileContext.getFileContext(conf);
+      this.fsDataIStream = fileContext.open(remoteAppLogFile);
+      reader =
+          new TFile.Reader(this.fsDataIStream, fileContext.getFileStatus(
+              remoteAppLogFile).getLen(), conf);
+      this.scanner = reader.createScanner();
+    }
+
+    // True until next() has been called once; lets the first call read the
+    // current entry instead of advancing past it.
+    private boolean atBeginning = true;
+
+    /**
+     * Returns the owner of the application.
+     *
+     * @return the application owner, or null if no owner record is present.
+     * @throws IOException
+     */
+    public String getApplicationOwner() throws IOException {
+      TFile.Reader.Scanner ownerScanner = reader.createScanner();
+      try {
+        LogKey key = new LogKey();
+        while (!ownerScanner.atEnd()) {
+          TFile.Reader.Scanner.Entry entry = ownerScanner.entry();
+          key.readFields(entry.getKeyStream());
+          if (key.toString().equals(APPLICATION_OWNER_KEY.toString())) {
+            DataInputStream valueStream = entry.getValueStream();
+            return valueStream.readUTF();
+          }
+          ownerScanner.advance();
+        }
+        return null;
+      } finally {
+        // Don't leak the temporary scanner.
+        ownerScanner.close();
+      }
+    }
+
+    /**
+     * Returns ACLs for the application. An empty map is returned if no ACLs are
+     * found.
+     *
+     * @return a map of the Application ACLs.
+     * @throws IOException
+     */
+    public Map<ApplicationAccessType, String> getApplicationAcls()
+        throws IOException {
+      // TODO Seek directly to the key once a comparator is specified.
+      TFile.Reader.Scanner aclScanner = reader.createScanner();
+      try {
+        LogKey key = new LogKey();
+        Map<ApplicationAccessType, String> acls =
+            new HashMap<ApplicationAccessType, String>();
+        while (!aclScanner.atEnd()) {
+          TFile.Reader.Scanner.Entry entry = aclScanner.entry();
+          key.readFields(entry.getKeyStream());
+          if (key.toString().equals(APPLICATION_ACL_KEY.toString())) {
+            DataInputStream valueStream = entry.getValueStream();
+            while (true) {
+              String appAccessOp = null;
+              String aclString = null;
+              try {
+                appAccessOp = valueStream.readUTF();
+              } catch (EOFException e) {
+                // Valid end of stream.
+                break;
+              }
+              // A dangling access-op with no ACL string means a corrupt record.
+              try {
+                aclString = valueStream.readUTF();
+              } catch (EOFException e) {
+                throw new YarnException("Error reading ACLs", e);
+              }
+              acls.put(ApplicationAccessType.valueOf(appAccessOp), aclString);
+            }
+
+          }
+          aclScanner.advance();
+        }
+        return acls;
+      } finally {
+        // Don't leak the temporary scanner.
+        aclScanner.close();
+      }
+    }
+
+    /**
+     * Read the next key and return the value-stream. Reserved META keys
+     * (version, owner, ACLs) are skipped transparently.
+     *
+     * @param key
+     * @return the valueStream if there are more keys or null otherwise.
+     * @throws IOException
+     */
+    public DataInputStream next(LogKey key) throws IOException {
+      if (!this.atBeginning) {
+        this.scanner.advance();
+      } else {
+        this.atBeginning = false;
+      }
+      if (this.scanner.atEnd()) {
+        return null;
+      }
+      TFile.Reader.Scanner.Entry entry = this.scanner.entry();
+      key.readFields(entry.getKeyStream());
+      // Skip META keys
+      if (RESERVED_KEYS.containsKey(key.toString())) {
+        return next(key);
+      }
+      DataInputStream valueStream = entry.getValueStream();
+      return valueStream;
+    }
+
+    // TODO Change Log format and interfaces to be containerId specific:
+    // expose the per-file types inside a container record and a stream per
+    // log type, instead of returning complete value streams.
+
+    /**
+     * Writes all logs for a single container to the provided writer.
+     * @param valueStream
+     * @param writer
+     * @throws IOException
+     */
+    public static void readAcontainerLogs(DataInputStream valueStream,
+        Writer writer) throws IOException {
+      int bufferSize = 65536;
+      char[] cbuf = new char[bufferSize];
+      String fileType;
+      String fileLengthStr;
+      long fileLength;
+
+      while (true) {
+        try {
+          fileType = valueStream.readUTF();
+        } catch (EOFException e) {
+          // EndOfFile
+          return;
+        }
+        fileLengthStr = valueStream.readUTF();
+        fileLength = Long.parseLong(fileLengthStr);
+        writer.write("\n\nLogType:");
+        writer.write(fileType);
+        writer.write("\nLogLength:");
+        writer.write(fileLengthStr);
+        writer.write("\nLog Contents:\n");
+        // ByteLevel
+        BoundedInputStream bis =
+            new BoundedInputStream(valueStream, fileLength);
+        InputStreamReader reader = new InputStreamReader(bis);
+        int currentRead = 0;
+        while ((currentRead = reader.read(cbuf, 0, bufferSize)) != -1) {
+          // Only the chars actually read this iteration are valid; writing
+          // the whole buffer would emit stale data from previous reads.
+          writer.write(cbuf, 0, currentRead);
+        }
+      }
+    }
+
+    /**
+     * Keep calling this till you get a {@link EOFException} for getting logs of
+     * all types for a single container.
+     *
+     * @param valueStream
+     * @param out
+     * @throws IOException
+     */
+    public static void readAContainerLogsForALogType(
+        DataInputStream valueStream, DataOutputStream out)
+          throws IOException {
+
+      byte[] buf = new byte[65535];
+
+      String fileType = valueStream.readUTF();
+      String fileLengthStr = valueStream.readUTF();
+      long fileLength = Long.parseLong(fileLengthStr);
+      // NOTE(review): writeUTF prepends a 2-byte length to each string, so
+      // this output is not plain text — kept as-is for format compatibility.
+      out.writeUTF("\nLogType:");
+      out.writeUTF(fileType);
+      out.writeUTF("\nLogLength:");
+      out.writeUTF(fileLengthStr);
+      out.writeUTF("\nLog Contents:\n");
+
+      // Track progress as a long: fileLength is a long, and an int counter
+      // would overflow (and wedge the loop) on logs larger than 2GB.
+      long curRead = 0;
+      long pendingRead = fileLength - curRead;
+      int toRead =
+          pendingRead > buf.length ? buf.length : (int) pendingRead;
+      int len = valueStream.read(buf, 0, toRead);
+      while (len != -1 && curRead < fileLength) {
+        out.write(buf, 0, len);
+        curRead += len;
+
+        pendingRead = fileLength - curRead;
+        toRead =
+            pendingRead > buf.length ? buf.length : (int) pendingRead;
+        len = valueStream.read(buf, 0, toRead);
+      }
+    }
+
+    /** Closes the scanner, the TFile reader, and the underlying stream. */
+    public void close() throws IOException {
+      this.scanner.close();
+      // Also close the TFile reader itself; previously only the scanner and
+      // stream were closed, leaking reader-held resources.
+      this.reader.close();
+      this.fsDataIStream.close();
+    }
+  }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/ContainerLogsRetentionPolicy.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/ContainerLogsRetentionPolicy.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/ContainerLogsRetentionPolicy.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/ContainerLogsRetentionPolicy.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,23 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.logaggregation;
+
/**
 * Policy controlling which containers' logs are kept for aggregation when an
 * application finishes:
 * APPLICATION_MASTER_ONLY        - keep only the AM container's logs;
 * AM_AND_FAILED_CONTAINERS_ONLY  - keep the AM's plus failed containers' logs;
 * ALL_CONTAINERS                 - keep every container's logs.
 */
public enum ContainerLogsRetentionPolicy {
  APPLICATION_MASTER_ONLY, AM_AND_FAILED_CONTAINERS_ONLY, ALL_CONTAINERS
}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogAggregationUtils.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,107 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.logaggregation;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+
+public class LogAggregationUtils {
+
+ /**
+ * Constructs the full filename for an application's log file per node.
+ * @param remoteRootLogDir
+ * @param appId
+ * @param user
+ * @param nodeId
+ * @param suffix
+ * @return the remote log file.
+ */
+ public static Path getRemoteNodeLogFileForApp(Path remoteRootLogDir,
+ ApplicationId appId, String user, NodeId nodeId, String suffix) {
+ return new Path(getRemoteAppLogDir(remoteRootLogDir, appId, user, suffix),
+ getNodeString(nodeId));
+ }
+
+ /**
+ * Gets the remote app log dir.
+ * @param remoteRootLogDir
+ * @param appId
+ * @param user
+ * @param suffix
+ * @return the remote application specific log dir.
+ */
+ public static Path getRemoteAppLogDir(Path remoteRootLogDir,
+ ApplicationId appId, String user, String suffix) {
+ return new Path(getRemoteLogSuffixedDir(remoteRootLogDir, user, suffix),
+ appId.toString());
+ }
+
+ /**
+ * Gets the remote suffixed log dir for the user.
+ * @param remoteRootLogDir
+ * @param user
+ * @param suffix
+ * @return the remote suffixed log dir.
+ */
+ public static Path getRemoteLogSuffixedDir(Path remoteRootLogDir,
+ String user, String suffix) {
+ if (suffix == null || suffix.isEmpty()) {
+ return getRemoteLogUserDir(remoteRootLogDir, user);
+ }
+ // TODO Maybe support suffix to be more than a single file.
+ return new Path(getRemoteLogUserDir(remoteRootLogDir, user), suffix);
+ }
+
+ // TODO Add a utility method to list available log files. Ignore the
+ // temporary ones.
+
+ /**
+ * Gets the remote log user dir.
+ * @param remoteRootLogDir
+ * @param user
+ * @return the remote per user log dir.
+ */
+ public static Path getRemoteLogUserDir(Path remoteRootLogDir, String user) {
+ return new Path(remoteRootLogDir, user);
+ }
+
+ /**
+ * Returns the suffix component of the log dir.
+ * @param conf
+ * @return the suffix which will be appended to the user log dir.
+ */
+ public static String getRemoteNodeLogDirSuffix(Configuration conf) {
+ return conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
+ }
+
+
+ /**
+ * Converts a nodeId to a form used in the app log file name.
+ * @param nodeId
+ * @return the node string to be used to construct the file name.
+ */
+ private static String getNodeString(NodeId nodeId) {
+ return nodeId.toString().replace(":", "_");
+ }
+
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/logaggregation/LogDumper.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,228 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.logaggregation;
+
+import java.io.DataInputStream;
+import java.io.DataOutputStream;
+import java.io.EOFException;
+import java.io.IOException;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.RemoteIterator;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogReader;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+
+public class LogDumper extends Configured implements Tool {
+
+ private static final String CONTAINER_ID_OPTION = "containerId";
+ private static final String APPLICATION_ID_OPTION = "applicationId";
+ private static final String NODE_ADDRESS_OPTION = "nodeAddress";
+ private static final String APP_OWNER_OPTION = "appOwner";
+
+ @Override
+ public int run(String[] args) throws Exception {
+
+ Options opts = new Options();
+ opts.addOption(APPLICATION_ID_OPTION, true, "ApplicationId");
+ opts.addOption(CONTAINER_ID_OPTION, true, "ContainerId");
+ opts.addOption(NODE_ADDRESS_OPTION, true, "NodeAddress");
+ opts.addOption(APP_OWNER_OPTION, true, "AppOwner");
+
+ if (args.length < 1) {
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("general options are: ", opts);
+ return -1;
+ }
+
+ CommandLineParser parser = new GnuParser();
+ String appIdStr = null;
+ String containerIdStr = null;
+ String nodeAddress = null;
+ String appOwner = null;
+ try {
+ CommandLine commandLine = parser.parse(opts, args, true);
+ appIdStr = commandLine.getOptionValue(APPLICATION_ID_OPTION);
+ containerIdStr = commandLine.getOptionValue(CONTAINER_ID_OPTION);
+ nodeAddress = commandLine.getOptionValue(NODE_ADDRESS_OPTION);
+ appOwner = commandLine.getOptionValue(APP_OWNER_OPTION);
+ } catch (ParseException e) {
+ System.out.println("options parsing failed: " + e.getMessage());
+
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("general options are: ", opts);
+ return -1;
+ }
+
+ if (appIdStr == null) {
+ System.out.println("ApplicationId cannot be null!");
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("general options are: ", opts);
+ return -1;
+ }
+
+ RecordFactory recordFactory =
+ RecordFactoryProvider.getRecordFactory(getConf());
+ ApplicationId appId =
+ ConverterUtils.toApplicationId(recordFactory, appIdStr);
+
+ DataOutputStream out = new DataOutputStream(System.out);
+
+ if (appOwner == null || appOwner.isEmpty()) {
+ appOwner = UserGroupInformation.getCurrentUser().getShortUserName();
+ }
+ if (containerIdStr == null && nodeAddress == null) {
+ dumpAllContainersLogs(appId, appOwner, out);
+ } else if ((containerIdStr == null && nodeAddress != null)
+ || (containerIdStr != null && nodeAddress == null)) {
+ System.out.println("ContainerId or NodeAddress cannot be null!");
+ HelpFormatter formatter = new HelpFormatter();
+ formatter.printHelp("general options are: ", opts);
+ return -1;
+ } else {
+ Path remoteRootLogDir =
+ new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
+ AggregatedLogFormat.LogReader reader =
+ new AggregatedLogFormat.LogReader(getConf(),
+ LogAggregationUtils.getRemoteNodeLogFileForApp(
+ remoteRootLogDir,
+ appId,
+ appOwner,
+ ConverterUtils.toNodeId(nodeAddress),
+ getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX)));
+ return dumpAContainerLogs(containerIdStr, reader, out);
+ }
+
+ return 0;
+ }
+
+ public void dumpAContainersLogs(String appId, String containerId,
+ String nodeId, String jobOwner) throws IOException {
+ Path remoteRootLogDir =
+ new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
+ String suffix = LogAggregationUtils.getRemoteNodeLogDirSuffix(getConf());
+ AggregatedLogFormat.LogReader reader =
+ new AggregatedLogFormat.LogReader(getConf(),
+ LogAggregationUtils.getRemoteNodeLogFileForApp(remoteRootLogDir,
+ ConverterUtils.toApplicationId(appId), jobOwner,
+ ConverterUtils.toNodeId(nodeId), suffix));
+ DataOutputStream out = new DataOutputStream(System.out);
+ dumpAContainerLogs(containerId, reader, out);
+ }
+
+ private int dumpAContainerLogs(String containerIdStr,
+ AggregatedLogFormat.LogReader reader, DataOutputStream out)
+ throws IOException {
+ DataInputStream valueStream;
+ LogKey key = new LogKey();
+ valueStream = reader.next(key);
+
+ while (valueStream != null && !key.toString().equals(containerIdStr)) {
+ // Next container
+ key = new LogKey();
+ valueStream = reader.next(key);
+ }
+
+ if (valueStream == null) {
+ System.out.println("Logs for container " + containerIdStr
+ + " are not present in this log-file.");
+ return -1;
+ }
+
+ while (true) {
+ try {
+ LogReader.readAContainerLogsForALogType(valueStream, out);
+ } catch (EOFException eof) {
+ break;
+ }
+ }
+ return 0;
+ }
+
+ private void dumpAllContainersLogs(ApplicationId appId, String appOwner,
+ DataOutputStream out) throws IOException {
+ Path remoteRootLogDir =
+ new Path(getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
+ String user = appOwner;
+ String logDirSuffix =
+ getConf().get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
+ //TODO Change this to get a list of files from the LAS.
+ Path remoteAppLogDir =
+ LogAggregationUtils.getRemoteAppLogDir(remoteRootLogDir, appId, user,
+ logDirSuffix);
+ RemoteIterator<FileStatus> nodeFiles =
+ FileContext.getFileContext().listStatus(remoteAppLogDir);
+ while (nodeFiles.hasNext()) {
+ FileStatus thisNodeFile = nodeFiles.next();
+ AggregatedLogFormat.LogReader reader =
+ new AggregatedLogFormat.LogReader(getConf(),
+ new Path(remoteAppLogDir, thisNodeFile.getPath().getName()));
+ try {
+
+ DataInputStream valueStream;
+ LogKey key = new LogKey();
+ valueStream = reader.next(key);
+
+ while (valueStream != null) {
+ while (true) {
+ try {
+ LogReader.readAContainerLogsForALogType(valueStream, out);
+ } catch (EOFException eof) {
+ break;
+ }
+ }
+
+ // Next container
+ key = new LogKey();
+ valueStream = reader.next(key);
+ }
+ } finally {
+ reader.close();
+ }
+ }
+ }
+
+ public static void main(String[] args) throws Exception {
+ Configuration conf = new YarnConfiguration();
+ LogDumper logDumper = new LogDumper();
+ logDumper.setConf(conf);
+ logDumper.run(args);
+ }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/security/ApplicationACLsManager.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,125 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.server.security;
+
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.security.AdminACLsManager;
+
+@InterfaceAudience.Private
+public class ApplicationACLsManager {
+
+ private static final Log LOG = LogFactory
+ .getLog(ApplicationACLsManager.class);
+
+ private final Configuration conf;
+ private final AdminACLsManager adminAclsManager;
+ private final ConcurrentMap<ApplicationId, Map<ApplicationAccessType, AccessControlList>> applicationACLS
+ = new ConcurrentHashMap<ApplicationId, Map<ApplicationAccessType, AccessControlList>>();
+
+ public ApplicationACLsManager(Configuration conf) {
+ this.conf = conf;
+ this.adminAclsManager = new AdminACLsManager(this.conf);
+ }
+
+ public boolean areACLsEnabled() {
+ return adminAclsManager.areACLsEnabled();
+ }
+
+ public void addApplication(ApplicationId appId,
+ Map<ApplicationAccessType, String> acls) {
+ Map<ApplicationAccessType, AccessControlList> finalMap
+ = new HashMap<ApplicationAccessType, AccessControlList>(acls.size());
+ for (Entry<ApplicationAccessType, String> acl : acls.entrySet()) {
+ finalMap.put(acl.getKey(), new AccessControlList(acl.getValue()));
+ }
+ this.applicationACLS.put(appId, finalMap);
+ }
+
+ public void removeApplication(ApplicationId appId) {
+ this.applicationACLS.remove(appId);
+ }
+
+ /**
+ * If authorization is enabled, checks whether the user (in the callerUGI) is
+ * authorized to perform the access specified by 'applicationAccessType' on
+ * the application by checking if the user is applicationOwner or part of
+ * application ACL for the specific access-type.
+ * <ul>
+ * <li>The owner of the application can have all access-types on the
+ * application</li>
+ * <li>For all other users/groups application-acls are checked</li>
+ * </ul>
+ *
+ * @param callerUGI
+ * @param applicationAccessType
+ * @param applicationOwner
+ * @param applicationId
+ * @throws AccessControlException
+ */
+ public boolean checkAccess(UserGroupInformation callerUGI,
+ ApplicationAccessType applicationAccessType, String applicationOwner,
+ ApplicationId applicationId) {
+
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("Verifying access-type " + applicationAccessType + " for "
+ + callerUGI + " on application " + applicationId + " owned by "
+ + applicationOwner);
+ }
+
+ String user = callerUGI.getShortUserName();
+ if (!areACLsEnabled()) {
+ return true;
+ }
+
+ AccessControlList applicationACL = this.applicationACLS
+ .get(applicationId).get(applicationAccessType);
+ if (applicationACL == null) {
+ if (LOG.isDebugEnabled()) {
+ LOG.debug("ACL not found for access-type " + applicationAccessType
+ + " for application " + applicationId + " owned by "
+ + applicationOwner + ". Using default ["
+ + YarnConfiguration.DEFAULT_YARN_APP_ACL + "]");
+ }
+ applicationACL =
+ new AccessControlList(YarnConfiguration.DEFAULT_YARN_APP_ACL);
+ }
+
+ // Allow application-owner for any type of access on the application
+ if (this.adminAclsManager.isAdmin(callerUGI)
+ || user.equals(applicationOwner)
+ || applicationACL.isUserAllowed(callerUGI)) {
+ return true;
+ }
+ return false;
+ }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnWebParams.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnWebParams.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnWebParams.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/YarnWebParams.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,28 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.webapp;
+
/**
 * Names of the web-request parameters shared by the YARN web UIs
 * (NodeManager, ResourceManager and HistoryServer log pages).
 */
public interface YarnWebParams {
  /** NodeManager id (host:port) of the node that ran the container. */
  String NM_NODENAME = "nm.id";
  /** Application id whose information/logs are requested. */
  String APPLICATION_ID = "app.id";
  /** Container id whose logs are requested. */
  String CONTAINER_ID = "container.id";
  /** Specific log-file type (e.g. stdout/stderr) within a container's logs. */
  String CONTAINER_LOG_TYPE= "log.type";
  /** Human-readable name used in page titles/messages for the entity. */
  String ENTITY_STRING = "entity.string";
  /** User that owns the application; used for remote log dir lookup. */
  String APP_OWNER = "app.owner";
}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,183 @@
+package org.apache.hadoop.yarn.webapp.log;
+
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.APP_OWNER;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
+import static org.apache.hadoop.yarn.webapp.YarnWebParams.NM_NODENAME;
+
+import java.io.DataInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
+import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
+import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
+
+import com.google.inject.Inject;
+
+public class AggregatedLogsBlock extends HtmlBlock {
+
+ private final Configuration conf;
+
+ @Inject
+ AggregatedLogsBlock(Configuration conf) {
+ this.conf = conf;
+ }
+
+ @Override
+ protected void render(Block html) {
+ ContainerId containerId = verifyAndGetContainerId(html);
+ NodeId nodeId = verifyAndGetNodeId(html);
+ String appOwner = verifyAndGetAppOwner(html);
+ if (containerId == null || nodeId == null || appOwner == null
+ || appOwner.isEmpty()) {
+ return;
+ }
+
+ ApplicationId applicationId =
+ containerId.getApplicationAttemptId().getApplicationId();
+ String logEntity = $(ENTITY_STRING);
+ if (logEntity == null || logEntity.isEmpty()) {
+ logEntity = containerId.toString();
+ }
+
+ if (!conf.getBoolean(YarnConfiguration.NM_LOG_AGGREGATION_ENABLED,
+ YarnConfiguration.DEFAULT_NM_LOG_AGGREGATION_ENABLED)) {
+ html.h1()
+ ._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
+ ._();
+ return;
+ }
+
+ Path remoteRootLogDir =
+ new Path(conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR,
+ YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR));
+ AggregatedLogFormat.LogReader reader = null;
+ try {
+ reader =
+ new AggregatedLogFormat.LogReader(conf,
+ LogAggregationUtils.getRemoteNodeLogFileForApp(
+ remoteRootLogDir, applicationId, appOwner, nodeId,
+ LogAggregationUtils.getRemoteNodeLogDirSuffix(conf)));
+ } catch (FileNotFoundException e) {
+ // ACLs not available till the log file is opened.
+ html.h1()
+ ._("Logs not available for "
+ + logEntity
+ + ". Aggregation may not be complete, "
+ + "Check back later or try the nodemanager at "
+ + nodeId)._();
+ return;
+ } catch (IOException e) {
+ html.h1()._("Error getting logs for " + logEntity)._();
+ LOG.error("Error getting logs for " + logEntity, e);
+ return;
+ }
+
+ String owner = null;
+ Map<ApplicationAccessType, String> appAcls = null;
+ try {
+ owner = reader.getApplicationOwner();
+ appAcls = reader.getApplicationAcls();
+ } catch (IOException e) {
+ html.h1()._("Error getting logs for " + logEntity)._();
+ LOG.error("Error getting logs for " + logEntity, e);
+ return;
+ }
+ ApplicationACLsManager aclsManager = new ApplicationACLsManager(conf);
+ aclsManager.addApplication(applicationId, appAcls);
+
+ String remoteUser = request().getRemoteUser();
+ UserGroupInformation callerUGI = null;
+ if (remoteUser != null) {
+ callerUGI = UserGroupInformation.createRemoteUser(remoteUser);
+ }
+ if (callerUGI != null
+ && !aclsManager.checkAccess(callerUGI, ApplicationAccessType.VIEW_APP,
+ owner, applicationId)) {
+ html.h1()
+ ._("User [" + remoteUser
+ + "] is not authorized to view the logs for " + logEntity)._();
+ return;
+ }
+
+ DataInputStream valueStream;
+ LogKey key = new LogKey();
+ try {
+ valueStream = reader.next(key);
+ while (valueStream != null
+ && !key.toString().equals(containerId.toString())) {
+ valueStream = reader.next(key);
+ }
+ if (valueStream == null) {
+ html.h1()._(
+ "Logs not available for " + logEntity
+ + ". Could be caused by the rentention policy")._();
+ return;
+ }
+ writer().write("<pre>");
+ AggregatedLogFormat.LogReader.readAcontainerLogs(valueStream, writer());
+ writer().write("</pre>");
+ return;
+ } catch (IOException e) {
+ html.h1()._("Error getting logs for " + logEntity)._();
+ LOG.error("Error getting logs for " + logEntity, e);
+ return;
+ }
+ }
+
+ private ContainerId verifyAndGetContainerId(Block html) {
+ String containerIdStr = $(CONTAINER_ID);
+ if (containerIdStr == null || containerIdStr.isEmpty()) {
+ html.h1()._("Cannot get container logs without a ContainerId")._();
+ return null;
+ }
+ ContainerId containerId = null;
+ try {
+ containerId = ConverterUtils.toContainerId(containerIdStr);
+ } catch (IllegalArgumentException e) {
+ html.h1()
+ ._("Cannot get container logs for invalid containerId: "
+ + containerIdStr)._();
+ return null;
+ }
+ return containerId;
+ }
+
+ private NodeId verifyAndGetNodeId(Block html) {
+ String nodeIdStr = $(NM_NODENAME);
+ if (nodeIdStr == null || nodeIdStr.isEmpty()) {
+ html.h1()._("Cannot get container logs without a NodeId")._();
+ return null;
+ }
+ NodeId nodeId = null;
+ try {
+ nodeId = ConverterUtils.toNodeId(nodeIdStr);
+ } catch (IllegalArgumentException e) {
+ html.h1()._("Cannot get container logs. Invalid nodeId: " + nodeIdStr)
+ ._();
+ return null;
+ }
+ return nodeId;
+ }
+
+ private String verifyAndGetAppOwner(Block html) {
+ String appOwner = $(APP_OWNER);
+ if (appOwner == null || appOwner.isEmpty()) {
+ html.h1()._("Cannot get container logs without an app owner")._();
+ }
+ return appOwner;
+ }
+}
\ No newline at end of file
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsNavBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsNavBlock.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsNavBlock.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsNavBlock.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,33 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements. See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership. The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License. You may obtain a copy of the License at
+*
+* http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.webapp.log;
+
+import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
+
/**
 * Minimal navigation side-bar for the aggregated-logs page: a "Logs"
 * heading plus the theme-switcher placeholder div.
 */
public class AggregatedLogsNavBlock extends HtmlBlock {

  @Override
  protected void render(Block html) {
    html
      .div("#nav")
        .h3()._("Logs")._() //
      ._()
      .div("#themeswitcher")._();
  }
}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsPage.java?rev=1196986&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsPage.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsPage.java Thu Nov 3 08:02:19 2011
@@ -0,0 +1,45 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.yarn.webapp.log;

import static org.apache.hadoop.yarn.webapp.YarnWebParams.CONTAINER_ID;
import static org.apache.hadoop.yarn.webapp.YarnWebParams.ENTITY_STRING;
import static org.apache.hadoop.yarn.util.StringHelper.join;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.THEMESWITCHER_ID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;


import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.view.TwoColumnLayout;


/**
 * Two-column page showing aggregated container logs: the nav block on the
 * left, the {@link AggregatedLogsBlock} content on the right.
 */
public class AggregatedLogsPage extends TwoColumnLayout {

  /**
   * Sets the page title from the entity string (falling back to the
   * container id, then "UNKNOWN") and configures the accordion/theme UI.
   *
   * @see TwoColumnLayout#preHead(org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML)
   */
  @Override
  protected void preHead(Page.HTML<_> html) {
    String logEntity = $(ENTITY_STRING);
    if (logEntity == null || logEntity.isEmpty()) {
      logEntity = $(CONTAINER_ID);
    }
    if (logEntity == null || logEntity.isEmpty()) {
      logEntity = "UNKNOWN";
    }
    set(TITLE, join("Logs for ", logEntity));
    set(ACCORDION_ID, "nav");
    set(initID(ACCORDION, "nav"), "{autoHeight:false, active:0}");
    set(THEMESWITCHER_ID, "themeswitcher");
  }

  /** @return the content block rendering the logs themselves. */
  @Override
  protected Class<? extends SubView> content() {
    return AggregatedLogsBlock.class;
  }

  /** @return the navigation side-bar block. */
  @Override
  protected Class<? extends SubView> nav() {
    return AggregatedLogsNavBlock.class;
  }
}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/application/ApplicationImpl.java Thu Nov 3 08:02:19 2011
@@ -32,6 +32,7 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.Dispatcher;
+import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.AuxServicesEventType;
@@ -41,7 +42,6 @@ import org.apache.hadoop.yarn.server.nod
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.ApplicationLocalizationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.event.LocalizationEventType;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.ContainerLogsRetentionPolicy;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppFinishedEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.event.LogHandlerAppStartedEvent;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java Thu Nov 3 08:02:19 2011
@@ -35,12 +35,13 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.Dispatcher;
+import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogKey;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogValue;
+import org.apache.hadoop.yarn.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationEventType;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogKey;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogValue;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.AggregatedLogFormat.LogWriter;
import org.apache.hadoop.yarn.util.ConverterUtils;
public class AppLogAggregatorImpl implements AppLogAggregator {
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/LogAggregationService.java Thu Nov 3 08:02:19 2011
@@ -43,6 +43,8 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
+import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
+import org.apache.hadoop.yarn.logaggregation.LogAggregationUtils;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.DeletionService;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.loghandler.LogHandler;
@@ -138,83 +140,7 @@ public class LogAggregationService exten
super.stop();
}
- /**
- * Constructs the full filename for an application's log file per node.
- * @param remoteRootLogDir
- * @param appId
- * @param user
- * @param nodeId
- * @param suffix
- * @return the remote log file.
- */
- public static Path getRemoteNodeLogFileForApp(Path remoteRootLogDir,
- ApplicationId appId, String user, NodeId nodeId, String suffix) {
- return new Path(getRemoteAppLogDir(remoteRootLogDir, appId, user, suffix),
- getNodeString(nodeId));
- }
-
- /**
- * Gets the remote app log dir.
- * @param remoteRootLogDir
- * @param appId
- * @param user
- * @param suffix
- * @return the remote application specific log dir.
- */
- public static Path getRemoteAppLogDir(Path remoteRootLogDir,
- ApplicationId appId, String user, String suffix) {
- return new Path(getRemoteLogSuffixedDir(remoteRootLogDir, user, suffix),
- appId.toString());
- }
-
- /**
- * Gets the remote suffixed log dir for the user.
- * @param remoteRootLogDir
- * @param user
- * @param suffix
- * @return the remote suffixed log dir.
- */
- private static Path getRemoteLogSuffixedDir(Path remoteRootLogDir,
- String user, String suffix) {
- if (suffix == null || suffix.isEmpty()) {
- return getRemoteLogUserDir(remoteRootLogDir, user);
- }
- // TODO Maybe support suffix to be more than a single file.
- return new Path(getRemoteLogUserDir(remoteRootLogDir, user), suffix);
- }
-
- // TODO Add a utility method to list available log files. Ignore the
- // temporary ones.
- /**
- * Gets the remote log user dir.
- * @param remoteRootLogDir
- * @param user
- * @return the remote per user log dir.
- */
- private static Path getRemoteLogUserDir(Path remoteRootLogDir, String user) {
- return new Path(remoteRootLogDir, user);
- }
-
- /**
- * Returns the suffix component of the log dir.
- * @param conf
- * @return the suffix which will be appended to the user log dir.
- */
- public static String getRemoteNodeLogDirSuffix(Configuration conf) {
- return conf.get(YarnConfiguration.NM_REMOTE_APP_LOG_DIR_SUFFIX,
- YarnConfiguration.DEFAULT_NM_REMOTE_APP_LOG_DIR_SUFFIX);
- }
-
-
- /**
- * Converts a nodeId to a form used in the app log file name.
- * @param nodeId
- * @return the node string to be used to construct the file name.
- */
- private static String getNodeString(NodeId nodeId) {
- return nodeId.toString().replace(":", "_");
- }
@@ -268,7 +194,7 @@ public class LogAggregationService exten
}
Path getRemoteNodeLogFileForApp(ApplicationId appId, String user) {
- return LogAggregationService.getRemoteNodeLogFileForApp(
+ return LogAggregationUtils.getRemoteNodeLogFileForApp(
this.remoteRootLogDir, appId, user, this.nodeId,
this.remoteRootLogDirSuffix);
}
@@ -299,7 +225,7 @@ public class LogAggregationService exten
}
try {
userDir =
- getRemoteLogUserDir(
+ LogAggregationUtils.getRemoteLogUserDir(
LogAggregationService.this.remoteRootLogDir, user);
userDir =
userDir.makeQualified(remoteFS.getUri(),
@@ -312,7 +238,7 @@ public class LogAggregationService exten
}
try {
suffixDir =
- getRemoteLogSuffixedDir(
+ LogAggregationUtils.getRemoteLogSuffixedDir(
LogAggregationService.this.remoteRootLogDir, user,
LogAggregationService.this.remoteRootLogDirSuffix);
suffixDir =
@@ -326,8 +252,8 @@ public class LogAggregationService exten
}
try {
appDir =
- getRemoteAppLogDir(LogAggregationService.this.remoteRootLogDir,
- appId, user,
+ LogAggregationUtils.getRemoteAppLogDir(
+ LogAggregationService.this.remoteRootLogDir, appId, user,
LogAggregationService.this.remoteRootLogDirSuffix);
appDir =
appDir.makeQualified(remoteFS.getUri(),
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/event/LogHandlerAppStartedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/event/LogHandlerAppStartedEvent.java?rev=1196986&r1=1196985&r2=1196986&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/event/LogHandlerAppStartedEvent.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/loghandler/event/LogHandlerAppStartedEvent.java Thu Nov 3 08:02:19 2011
@@ -23,7 +23,7 @@ import java.util.Map;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.logaggregation.ContainerLogsRetentionPolicy;
+import org.apache.hadoop.yarn.logaggregation.ContainerLogsRetentionPolicy;
public class LogHandlerAppStartedEvent extends LogHandlerEvent {