You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ji...@apache.org on 2017/09/25 23:38:37 UTC
[38/86] [abbrv] hadoop git commit: YARN-7050. Post cleanup after
YARN-6903, removal of org.apache.slider package. Contributed by Jian He
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
index 647bfe9..a044838 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/ProviderUtils.java
@@ -24,30 +24,24 @@ import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsAction;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
-import org.apache.slider.api.ClusterNode;
-import org.apache.slider.api.ResourceKeys;
-import org.apache.slider.api.RoleKeys;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.api.resource.Component;
-import org.apache.slider.api.resource.ConfigFile;
-import org.apache.slider.api.resource.Configuration;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
-import org.apache.hadoop.yarn.service.conf.SliderXmlConfKeys;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.common.tools.SliderUtils;
-import org.apache.slider.core.exceptions.BadCommandArgumentsException;
-import org.apache.slider.core.exceptions.SliderException;
-import org.apache.slider.core.launch.AbstractLauncher;
-import org.apache.slider.core.launch.ContainerLauncher;
-import org.apache.slider.core.registry.docstore.ConfigFormat;
-import org.apache.slider.core.registry.docstore.PublishedConfiguration;
-import org.apache.slider.core.registry.docstore.PublishedConfigurationOutputter;
-import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
import org.apache.hadoop.yarn.service.ServiceContext;
-import org.apache.slider.server.appmaster.state.StateAccessForProviders;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.api.records.Component;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.api.records.ConfigFormat;
+import org.apache.hadoop.yarn.service.api.records.Configuration;
+import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.exceptions.BadCommandArgumentsException;
+import org.apache.hadoop.yarn.service.exceptions.SliderException;
+import org.apache.hadoop.yarn.service.utils.PublishedConfiguration;
+import org.apache.hadoop.yarn.service.utils.PublishedConfigurationOutputter;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -55,23 +49,18 @@ import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.OutputStream;
-import java.util.ArrayList;
-import java.util.Collection;
import java.util.HashMap;
-import java.util.List;
-import java.util.Locale;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.regex.Pattern;
-import static org.apache.slider.api.ServiceApiConstants.*;
-import static org.apache.hadoop.yarn.service.utils.ServiceApiUtil.$;
+import static org.apache.hadoop.yarn.service.api.constants.ServiceApiConstants.*;
/**
* This is a factoring out of methods handy for providers. It's bonded to a log
* at construction time.
*/
-public class ProviderUtils implements RoleKeys, SliderKeys {
+public class ProviderUtils implements YarnServiceConstants {
protected static final Logger log =
LoggerFactory.getLogger(ProviderUtils.class);
@@ -174,46 +163,22 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
}
/**
- * Get resource requirements from a String value. If value isn't specified,
- * use the default value. If value is greater than max, use the max value.
- * @param val string value
- * @param defVal default value
- * @param maxVal maximum value
- * @return int resource requirement
- */
- public int getRoleResourceRequirement(String val,
- int defVal,
- int maxVal) {
- if (val==null) {
- val = Integer.toString(defVal);
- }
- Integer intVal;
- if (ResourceKeys.YARN_RESOURCE_MAX.equals(val)) {
- intVal = maxVal;
- } else {
- intVal = Integer.decode(val);
- }
- return intVal;
- }
-
-
- /**
* Localize the service keytabs for the application.
* @param launcher container launcher
* @param fileSystem file system
* @throws IOException trouble uploading to HDFS
*/
- public void localizeServiceKeytabs(ContainerLauncher launcher,
+ public void localizeServiceKeytabs(AbstractLauncher launcher,
SliderFileSystem fileSystem, Application application) throws IOException {
Configuration conf = application.getConfiguration();
String keytabPathOnHost =
- conf.getProperty(SliderXmlConfKeys.KEY_AM_KEYTAB_LOCAL_PATH);
+ conf.getProperty(YarnServiceConf.KEY_AM_KEYTAB_LOCAL_PATH);
if (SliderUtils.isUnset(keytabPathOnHost)) {
String amKeytabName =
- conf.getProperty(SliderXmlConfKeys.KEY_AM_LOGIN_KEYTAB_NAME);
+ conf.getProperty(YarnServiceConf.KEY_AM_LOGIN_KEYTAB_NAME);
String keytabDir =
- conf.getProperty(SliderXmlConfKeys.KEY_HDFS_KEYTAB_DIR);
+ conf.getProperty(YarnServiceConf.KEY_HDFS_KEYTAB_DIR);
// we need to localize the keytab files in the directory
Path keytabDirPath = fileSystem.buildKeytabPath(keytabDir, null,
application.getName());
@@ -434,38 +399,4 @@ public class ProviderUtils implements RoleKeys, SliderKeys {
String.valueOf(instance.getCompInstanceId().getId()));
return tokens;
}
-
- /**
- * Add ROLE_HOST tokens for substitution into config values.
- * @param tokens existing tokens
- * @param amState access to AM state
- */
- public static void addComponentHostTokens(Map<String, String> tokens,
- StateAccessForProviders amState) {
- if (amState == null) {
- return;
- }
- for (Map.Entry<String, Map<String, ClusterNode>> entry :
- amState.getRoleClusterNodeMapping().entrySet()) {
- String tokenName = entry.getKey().toUpperCase(Locale.ENGLISH) + "_HOST";
- String hosts = StringUtils .join(",",
- getHostsList(entry.getValue().values(), true));
- tokens.put($(tokenName), hosts);
- }
- }
-
- /**
- * Return a list of hosts based on current ClusterNodes.
- * @param values cluster nodes
- * @param hostOnly whether host or host/server name will be added to list
- * @return list of hosts
- */
- public static Iterable<String> getHostsList(Collection<ClusterNode> values,
- boolean hostOnly) {
- List<String> hosts = new ArrayList<>();
- for (ClusterNode cn : values) {
- hosts.add(hostOnly ? cn.host : cn.host + "/" + cn.name);
- }
- return hosts;
- }
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java
index 32cedb6..0920a9c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultClientProvider.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.yarn.service.provider.defaultImpl;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.ConfigFile;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
import java.io.IOException;
import java.nio.file.Paths;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
index a77214c..33f8278 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/defaultImpl/DefaultProviderService.java
@@ -19,9 +19,9 @@ package org.apache.hadoop.yarn.service.provider.defaultImpl;
import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
import org.apache.hadoop.yarn.service.provider.AbstractProviderService;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.launch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
index c1f225c..d4a2254 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerClientProvider.java
@@ -19,16 +19,16 @@ package org.apache.hadoop.yarn.service.provider.docker;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileSystem;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.ConfigFile;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
-import org.apache.slider.util.RestApiErrorMessages;
+import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
import java.io.IOException;
public class DockerClientProvider extends AbstractClientProvider
- implements SliderKeys {
+ implements YarnServiceConstants {
public DockerClientProvider() {
super();
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
index c20eaad..236ddd9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/docker/DockerProviderService.java
@@ -21,9 +21,9 @@ import org.apache.hadoop.registry.client.api.RegistryConstants;
import org.apache.hadoop.registry.client.binding.RegistryUtils;
import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
import org.apache.hadoop.yarn.service.provider.AbstractProviderService;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.launch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
import java.io.IOException;
import java.text.MessageFormat;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
index 2b54be9..01f7b20 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballClientProvider.java
@@ -20,17 +20,17 @@ package org.apache.hadoop.yarn.service.provider.tarball;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.slider.api.resource.Artifact;
-import org.apache.slider.api.resource.ConfigFile;
-import org.apache.hadoop.yarn.service.conf.SliderKeys;
+import org.apache.hadoop.yarn.service.api.records.Artifact;
+import org.apache.hadoop.yarn.service.api.records.ConfigFile;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConstants;
import org.apache.hadoop.yarn.service.provider.AbstractClientProvider;
-import org.apache.slider.util.RestApiErrorMessages;
+import org.apache.hadoop.yarn.service.exceptions.RestApiErrorMessages;
import java.io.IOException;
import java.nio.file.Paths;
public class TarballClientProvider extends AbstractClientProvider
- implements SliderKeys {
+ implements YarnServiceConstants {
public TarballClientProvider() {
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
index 3c3d425..2403255 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/provider/tarball/TarballProviderService.java
@@ -22,9 +22,9 @@ import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
import org.apache.hadoop.yarn.service.provider.AbstractProviderService;
-import org.apache.slider.api.resource.Application;
-import org.apache.slider.common.tools.SliderFileSystem;
-import org.apache.slider.core.launch.AbstractLauncher;
+import org.apache.hadoop.yarn.service.api.records.Application;
+import org.apache.hadoop.yarn.service.utils.SliderFileSystem;
+import org.apache.hadoop.yarn.service.containerlaunch.AbstractLauncher;
import java.io.IOException;
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java
new file mode 100644
index 0000000..56634f6
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/CustomRegistryConstants.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.registry;
+
+/**
+ * These are constants unique to the Slider AM.
+ */
+public class CustomRegistryConstants {
+
+ public static final String MANAGEMENT_REST_API =
+ "classpath:org.apache.slider.management";
+
+ public static final String REGISTRY_REST_API =
+ "classpath:org.apache.slider.registry";
+
+ public static final String PUBLISHER_REST_API =
+ "classpath:org.apache.slider.publisher";
+
+ public static final String PUBLISHER_CONFIGURATIONS_API =
+ "classpath:org.apache.slider.publisher.configurations";
+
+ public static final String PUBLISHER_EXPORTS_API =
+ "classpath:org.apache.slider.publisher.exports";
+
+ public static final String PUBLISHER_DOCUMENTS_API =
+ "classpath:org.apache.slider.publisher.documents";
+
+ public static final String AGENT_SECURE_REST_API =
+ "classpath:org.apache.slider.agents.secure";
+
+ public static final String AGENT_ONEWAY_REST_API =
+ "classpath:org.apache.slider.agents.oneway";
+
+ public static final String AM_IPC_PROTOCOL =
+ "classpath:org.apache.slider.appmaster.ipc";
+
+ public static final String AM_REST_BASE =
+ "classpath:org.apache.slider.client.rest";
+
+ public static final String WEB_UI = "http://";
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
new file mode 100644
index 0000000..ef5ed91
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/registry/YarnRegistryViewForProviders.java
@@ -0,0 +1,225 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.registry;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.PathNotFoundException;
+import org.apache.hadoop.registry.client.api.RegistryConstants;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.registry.client.api.BindFlags;
+import org.apache.hadoop.registry.client.api.RegistryOperations;
+import org.apache.hadoop.registry.client.binding.RegistryUtils;
+import org.apache.hadoop.registry.client.binding.RegistryPathUtils;
+
+import org.apache.hadoop.registry.client.types.ServiceRecord;
+import org.apache.hadoop.yarn.service.compinstance.ComponentInstanceId;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+
+import java.io.IOException;
+import java.util.List;
+
+import static org.apache.hadoop.registry.client.binding.RegistryPathUtils.join;
+
+/**
+ * Registry view for providers. This tracks where the service
+ * is registered, offers access to the record and other things.
+ */
+public class YarnRegistryViewForProviders {
+ private static final Log LOG =
+ LogFactory.getLog(YarnRegistryViewForProviders.class);
+
+ private final RegistryOperations registryOperations;
+ private final String user;
+ private final String sliderServiceClass;
+ private final String instanceName;
+ /**
+ * Record used where the service registered itself.
+ * Null until the service is registered
+ */
+ private ServiceRecord selfRegistration;
+
+ /**
+ * Path where record was registered
+ * Null until the service is registered
+ */
+ private String selfRegistrationPath;
+
+ public YarnRegistryViewForProviders(RegistryOperations registryOperations,
+ String user,
+ String sliderServiceClass,
+ String instanceName,
+ ApplicationAttemptId applicationAttemptId) {
+ Preconditions.checkArgument(registryOperations != null,
+ "null registry operations");
+ Preconditions.checkArgument(user != null, "null user");
+ Preconditions.checkArgument(SliderUtils.isSet(sliderServiceClass),
+ "unset service class");
+ Preconditions.checkArgument(SliderUtils.isSet(instanceName),
+ "instanceName");
+ Preconditions.checkArgument(applicationAttemptId != null,
+ "null applicationAttemptId");
+ this.registryOperations = registryOperations;
+ this.user = user;
+ this.sliderServiceClass = sliderServiceClass;
+ this.instanceName = instanceName;
+ }
+
+ public String getUser() {
+ return user;
+ }
+
+
+ private void setSelfRegistration(ServiceRecord selfRegistration) {
+ this.selfRegistration = selfRegistration;
+ }
+
+ /**
+ * Get the path to where the service has registered itself.
+ * Null until the service is registered
+ * @return the service registration path.
+ */
+ public String getSelfRegistrationPath() {
+ return selfRegistrationPath;
+ }
+
+ /**
+ * Get the absolute path to where the service has registered itself.
+ * This includes the base registry path
+ * Null until the service is registered
+ * @return the service registration path.
+ */
+ public String getAbsoluteSelfRegistrationPath() {
+ if (selfRegistrationPath == null) {
+ return null;
+ }
+ String root = registryOperations.getConfig().getTrimmed(
+ RegistryConstants.KEY_REGISTRY_ZK_ROOT,
+ RegistryConstants.DEFAULT_ZK_REGISTRY_ROOT);
+ return RegistryPathUtils.join(root, selfRegistrationPath);
+ }
+
+ /**
+ * Add a component under the slider name/entry
+ * @param componentName component name
+ * @param record record to put
+ * @throws IOException
+ */
+ public void putComponent(String componentName,
+ ServiceRecord record) throws
+ IOException {
+ putComponent(sliderServiceClass, instanceName,
+ componentName,
+ record);
+ }
+
+ /**
+ * Add a component record under a specific service.
+ * @param serviceClass service class to use under ~user
+ * @param serviceName name of the service instance
+ * @param componentName component name
+ * @param record record to put
+ * @throws IOException on registry failures
+ */
+ public void putComponent(String serviceClass,
+ String serviceName,
+ String componentName,
+ ServiceRecord record) throws IOException {
+ String path = RegistryUtils.componentPath(
+ user, serviceClass, serviceName, componentName);
+ registryOperations.mknode(RegistryPathUtils.parentOf(path), true);
+ registryOperations.bind(path, record, BindFlags.OVERWRITE);
+ }
+
+ /**
+ * Add a service under a path, optionally purging any history
+ * @param username user
+ * @param serviceClass service class to use under ~user
+ * @param serviceName name of the service
+ * @param record service record
+ * @param deleteTreeFirst perform recursive delete of the path first.
+ * @return the path the service was created at
+ * @throws IOException
+ */
+ public String putService(String username,
+ String serviceClass,
+ String serviceName,
+ ServiceRecord record,
+ boolean deleteTreeFirst) throws IOException {
+ String path = RegistryUtils.servicePath(
+ username, serviceClass, serviceName);
+ if (deleteTreeFirst) {
+ registryOperations.delete(path, true);
+ }
+ registryOperations.mknode(RegistryPathUtils.parentOf(path), true);
+ registryOperations.bind(path, record, BindFlags.OVERWRITE);
+ return path;
+ }
+
+ /**
+ * Add a service under a path for the current user
+ * @param record service record
+ * @param deleteTreeFirst perform recursive delete of the path first
+ * @return the path the service was created at
+ * @throws IOException
+ */
+ public String registerSelf(
+ ServiceRecord record,
+ boolean deleteTreeFirst) throws IOException {
+ selfRegistrationPath =
+ putService(user, sliderServiceClass, instanceName, record, deleteTreeFirst);
+ setSelfRegistration(record);
+ return selfRegistrationPath;
+ }
+
+ /**
+ * Delete a component's registry entry.
+ * @param instanceId component instance id, used for logging
+ * @param containerId container id naming the registry entry to delete
+ * @throws IOException on registry failures
+ */
+ public void deleteComponent(ComponentInstanceId instanceId,
+ String containerId) throws IOException {
+ String path = RegistryUtils.componentPath(
+ user, sliderServiceClass, instanceName,
+ containerId);
+ LOG.info(instanceId + ": Deleting registry path " + path);
+ registryOperations.delete(path, false);
+ }
+
+ /**
+ * Delete the children of a path, but not the path itself.
+ * It is not an error if the path does not exist
+ * @param path path to delete
+ * @param recursive flag to request recursive deletes
+ * @throws IOException IO problems
+ */
+ public void deleteChildren(String path, boolean recursive) throws IOException {
+ List<String> childNames = null;
+ try {
+ childNames = registryOperations.list(path);
+ } catch (PathNotFoundException e) {
+ return;
+ }
+ for (String childName : childNames) {
+ String child = join(path, childName);
+ registryOperations.delete(child, recursive);
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java
new file mode 100644
index 0000000..2d01bef
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/BaseRestClient.java
@@ -0,0 +1,149 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.rest;
+
+import com.google.common.base.Preconditions;
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.ClientHandlerException;
+import com.sun.jersey.api.client.GenericType;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.WebResource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.ws.rs.core.MediaType;
+import java.io.IOException;
+import java.net.URI;
+
+
+/**
+ * This is a base class for Jersey REST clients in Slider.
+ * It supports the execution of operations &mdash; with
+ * exceptions uprated to IOExceptions when needed.
+ * <p>
+ * Subclasses can use these operations to provide an API-like view
+ * of the REST model
+ */
+public class BaseRestClient {
+  private static final Logger log =
+      LoggerFactory.getLogger(BaseRestClient.class);
+
+  /** Jersey client all operations go through; never null. */
+  private final Client client;
+
+  /**
+   * Construct an instance bonded to the given Jersey client.
+   * @param client jersey client; must not be null
+   */
+  public BaseRestClient(
+      Client client) {
+    Preconditions.checkNotNull(client, "null jersey client");
+    this.client = client;
+  }
+
+  /**
+   * Get the jersey client
+   * @return jersey client
+   */
+  public Client getClient() {
+    return client;
+  }
+
+  /**
+   * Execute the operation. Failures are raised as IOException subclasses
+   * @param method method to execute
+   * @param resource resource to work against
+   * @param c class to build
+   * @param <T> type expected
+   * @return an instance of the type T
+   * @throws IOException on any failure
+   */
+  public <T> T exec(HttpVerb method, WebResource resource, Class<T> c)
+      throws IOException {
+    try {
+      Preconditions.checkArgument(c != null);
+      // fixed: format string was "{}} {}" -- stray brace in the log output
+      log.debug("{} {}", method, resource.getURI());
+      return resource.accept(MediaType.APPLICATION_JSON_TYPE)
+          .method(method.getVerb(), c);
+    } catch (ClientHandlerException ex) {
+      // client-side failure (connection refused, timeout, ...)
+      throw ExceptionConverter.convertJerseyException(method.getVerb(),
+          resource.getURI().toString(),
+          ex);
+    } catch (UniformInterfaceException ex) {
+      // HTTP status-code fault reported by the far end
+      throw UgiJerseyBinding.uprateFaults(method,
+          resource.getURI().toString(),
+          ex);
+    }
+  }
+
+  /**
+   * Execute the operation. Failures are raised as IOException subclasses
+   * @param method method to execute
+   * @param resource resource to work against
+   * @param t type to work with
+   * @param <T> type expected
+   * @return an instance of the type T
+   * @throws IOException on any failure
+   */
+  public <T> T exec(HttpVerb method, WebResource resource, GenericType<T> t)
+      throws IOException {
+    try {
+      Preconditions.checkArgument(t != null);
+      log.debug("{} {}", method, resource.getURI());
+      // fixed: accept() returns a builder which must carry the request;
+      // the previous code discarded it, so the Accept header was never
+      // actually applied on this overload.
+      return resource.accept(MediaType.APPLICATION_JSON_TYPE)
+          .method(method.getVerb(), t);
+    } catch (ClientHandlerException ex) {
+      throw ExceptionConverter.convertJerseyException(method.getVerb(),
+          resource.getURI().toString(),
+          ex);
+    } catch (UniformInterfaceException ex) {
+      throw UgiJerseyBinding.uprateFaults(method, resource.getURI().toString(),
+          ex);
+    }
+  }
+
+
+  /**
+   * Execute the GET operation. Failures are raised as IOException subclasses
+   * @param resource resource to work against
+   * @param c class to build
+   * @param <T> type expected
+   * @return an instance of the type T
+   * @throws IOException on any failure
+   */
+  public <T> T get(WebResource resource, Class<T> c) throws IOException {
+    return exec(HttpVerb.GET, resource, c);
+  }
+
+  /**
+   * Create a Web resource from the client.
+   *
+   * @param u the URI of the resource.
+   * @return the Web resource.
+   */
+  public WebResource resource(URI u) {
+    return client.resource(u);
+  }
+
+  /**
+   * Create a Web resource from the client.
+   *
+   * @param url the URI of the resource.
+   * @return the Web resource.
+   */
+  public WebResource resource(String url) {
+    return client.resource(url);
+  }
+
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java
new file mode 100644
index 0000000..12fdc79
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/ExceptionConverter.java
@@ -0,0 +1,128 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.rest;
+
+import com.sun.jersey.api.client.ClientHandlerException;
+import com.sun.jersey.api.client.ClientResponse;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import org.apache.hadoop.fs.InvalidRequestException;
+import org.apache.hadoop.fs.PathAccessDeniedException;
+import org.apache.hadoop.fs.PathIOException;
+import org.apache.hadoop.yarn.webapp.*;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.servlet.http.HttpServletResponse;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+
+/**
+ * Static methods to convert exceptions into different types, including
+ * extraction of details and finer-grained conversions.
+ */
+public class ExceptionConverter {
+ private static final Logger
+ log = LoggerFactory.getLogger(ExceptionConverter.class);
+
+ /**
+ * Uprate error codes 400 and up into faults.
+ * <p>
+ * 401/403 become {@link PathAccessDeniedException} (access denied);
+ * 400, 406 and 415 become {@link InvalidRequestException} (bad request);
+ * any other 4xx becomes {@link FileNotFoundException} (unknown resource);
+ * everything else (5xx and unclassified) becomes {@link PathIOException}.
+ * The original exception is always attached as the cause.
+ * @param verb HTTP Verb used
+ * @param targetURL URL being targeted
+ * @param exception original exception
+ * @return a new exception, the original one nested as a cause
+ */
+ public static IOException convertJerseyException(String verb,
+ String targetURL,
+ UniformInterfaceException exception) {
+
+ IOException ioe = null;
+ ClientResponse response = exception.getResponse();
+ if (response != null) {
+ int status = response.getStatus();
+ String body = "";
+ try {
+ if (response.hasEntity()) {
+ body = response.getEntity(String.class);
+ log.error("{} {} returned status {} and body\n{}",
+ verb, targetURL, status, body);
+ } else {
+ log.error("{} {} returned status {} and empty body",
+ verb, targetURL, status);
+ }
+ } catch (Exception e) {
+ // body extraction is best-effort; never let it mask the real fault
+ log.warn("Failed to extract body from client response", e);
+ }
+
+ if (status == HttpServletResponse.SC_UNAUTHORIZED
+ || status == HttpServletResponse.SC_FORBIDDEN) {
+ ioe = new PathAccessDeniedException(targetURL);
+ } else if (status == HttpServletResponse.SC_BAD_REQUEST
+ || status == HttpServletResponse.SC_NOT_ACCEPTABLE
+ || status == HttpServletResponse.SC_UNSUPPORTED_MEDIA_TYPE) {
+ // bad request
+ ioe = new InvalidRequestException(
+ String.format("Bad %s request: status code %d against %s",
+ verb, status, targetURL));
+ } else if (status > 400 && status < 500) {
+ // every remaining 4xx is treated as "resource not found"
+ ioe = new FileNotFoundException(targetURL);
+ }
+ if (ioe == null) {
+ // 5xx and anything not classified above
+ ioe = new PathIOException(targetURL,
+ verb + " " + targetURL
+ + " failed with status code : " + status
+ + ":" + exception);
+ }
+ } else {
+ // no response at all: failed before or while talking to the server
+ ioe = new PathIOException(targetURL,
+ verb + " " + targetURL + " failed: " + exception);
+ }
+ ioe.initCause(exception);
+ return ioe;
+ }
+
+ /**
+ * Handle a client-side Jersey exception.
+ * <p>
+ * If there's an inner IOException, return that.
+ * <p>
+ * Otherwise: create a new wrapper IOE including verb and target details
+ * @param verb HTTP Verb used
+ * @param targetURL URL being targeted
+ * @param exception original exception
+ * @return an exception to throw
+ */
+ public static IOException convertJerseyException(String verb,
+ String targetURL,
+ ClientHandlerException exception) {
+ if (exception.getCause() instanceof IOException) {
+ // unwrap rather than double-wrap an IO failure
+ return (IOException)exception.getCause();
+ } else {
+ IOException ioe = new IOException(
+ verb + " " + targetURL + " failed: " + exception);
+ ioe.initCause(exception);
+ return ioe;
+ }
+ }
+
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java
new file mode 100644
index 0000000..93f9082
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/HttpVerb.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.rest;
+
+/**
+ * The HTTP verbs used by the REST client, each tagged with whether a
+ * request body may be submitted and whether a response body may come back.
+ * <p>
+ * A verb that permits a response body is not guaranteed to return one:
+ * a 204 status code means "no body", yet the operation is still
+ * considered a success.
+ */
+public enum HttpVerb {
+  GET("GET", false, true),
+  POST("POST", true, true),
+  PUT("PUT", true, true),
+  DELETE("DELETE", false, true),
+  HEAD("HEAD", false, false);
+
+  private final String verb;
+  private final boolean uploadBody;
+  private final boolean responseBody;
+
+  HttpVerb(String verb, boolean uploadBody, boolean responseBody) {
+    this.verb = verb;
+    this.uploadBody = uploadBody;
+    this.responseBody = responseBody;
+  }
+
+  /** @return the verb as it appears on the wire */
+  public String getVerb() {
+    return verb;
+  }
+
+  /** @return true if a request body may accompany this verb */
+  public boolean hasUploadBody() {
+    return uploadBody;
+  }
+
+  /** @return true if a response body may be returned */
+  public boolean hasResponseBody() {
+    return responseBody;
+  }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java
new file mode 100644
index 0000000..fcd7f55
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/SliderURLConnectionFactory.java
@@ -0,0 +1,176 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.rest;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.web.KerberosUgiAuthenticator;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.net.URLConnection;
+import java.security.GeneralSecurityException;
+
+/**
+ * Factory for URL connections; used behind the scenes in the Jersey integration.
+ * <p>
+ * Derived from the WebHDFS implementation.
+ */
+public class SliderURLConnectionFactory {
+ private static final Logger log =
+ LoggerFactory.getLogger(SliderURLConnectionFactory.class);
+
+ /**
+ * Timeout for socket connects and reads
+ */
+ public final static int DEFAULT_SOCKET_TIMEOUT = 60 * 1000; // 1 minute
+ // configurator applied to every connection this factory opens
+ private final ConnectionConfigurator connConfigurator;
+
+ private static final ConnectionConfigurator DEFAULT_CONFIGURATOR = new BasicConfigurator();
+
+ /**
+ * Construct a new URLConnectionFactory based on the configuration. It will
+ * try to load SSL certificates when it is specified.
+ * Falls back to the plain (non-SSL) configurator if SSL setup fails.
+ */
+ public static SliderURLConnectionFactory newInstance(Configuration conf) {
+ ConnectionConfigurator conn;
+ try {
+ conn = newSslConnConfigurator(DEFAULT_SOCKET_TIMEOUT, conf);
+ } catch (Exception e) {
+ // deliberate best-effort: SSL config is optional, so failure only logs
+ log.debug("Cannot load customized SSL configuration.", e);
+ conn = DEFAULT_CONFIGURATOR;
+ }
+ return new SliderURLConnectionFactory(conn);
+ }
+
+ private SliderURLConnectionFactory(ConnectionConfigurator connConfigurator) {
+ this.connConfigurator = connConfigurator;
+ }
+
+ /**
+ * Create a new ConnectionConfigurator for SSL connections.
+ * The returned configurator installs the socket factory and hostname
+ * verifier on HTTPS connections and applies the timeouts to all.
+ */
+ private static ConnectionConfigurator newSslConnConfigurator(final int timeout,
+ Configuration conf) throws IOException, GeneralSecurityException {
+ final SSLFactory factory;
+ final SSLSocketFactory sf;
+ final HostnameVerifier hv;
+
+ factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+ factory.init();
+ sf = factory.createSSLSocketFactory();
+ hv = factory.getHostnameVerifier();
+
+ return new ConnectionConfigurator() {
+ @Override
+ public HttpURLConnection configure(HttpURLConnection conn)
+ throws IOException {
+ if (conn instanceof HttpsURLConnection) {
+ HttpsURLConnection c = (HttpsURLConnection) conn;
+ c.setSSLSocketFactory(sf);
+ c.setHostnameVerifier(hv);
+ }
+ SliderURLConnectionFactory.setupConnection(conn, timeout);
+ return conn;
+ }
+ };
+ }
+
+ /**
+ * Opens a url with read and connect timeouts
+ *
+ * @param url
+ * to open
+ * @return URLConnection
+ * @throws IOException
+ */
+ public URLConnection openConnection(URL url) throws IOException {
+ try {
+ return openConnection(url, false);
+ } catch (AuthenticationException e) {
+ // Unreachable
+ // NOTE(review): isSpnego=false never authenticates, so this cannot
+ // throw; if it ever became reachable, callers would see null here.
+ return null;
+ }
+ }
+
+ /**
+ * Opens a url with read and connect timeouts
+ *
+ * @param url
+ * URL to open
+ * @param isSpnego
+ * whether the url should be authenticated via SPNEGO
+ * @return URLConnection
+ * @throws IOException
+ * @throws AuthenticationException
+ */
+ public URLConnection openConnection(URL url, boolean isSpnego)
+ throws IOException, AuthenticationException {
+ if (isSpnego) {
+ log.debug("open AuthenticatedURL connection {}", url);
+ UserGroupInformation.getCurrentUser().checkTGTAndReloginFromKeytab();
+ final AuthenticatedURL.Token authToken = new AuthenticatedURL.Token();
+ return new AuthenticatedURL(new KerberosUgiAuthenticator(),
+ connConfigurator).openConnection(url, authToken);
+ } else {
+ log.debug("open URL connection {}", url);
+ URLConnection connection = url.openConnection();
+ if (connection instanceof HttpURLConnection) {
+ connConfigurator.configure((HttpURLConnection) connection);
+ }
+ return connection;
+ }
+ }
+
+ /**
+ * Sets connection parameters on the given URLConnection
+ *
+ * @param connection
+ * URLConnection to set
+ * @param socketTimeout
+ * the connection and read timeout of the connection.
+ */
+ private static void setupConnection(URLConnection connection, int socketTimeout) {
+ connection.setConnectTimeout(socketTimeout);
+ connection.setReadTimeout(socketTimeout);
+ connection.setUseCaches(false);
+ if (connection instanceof HttpURLConnection) {
+ ((HttpURLConnection) connection).setInstanceFollowRedirects(true);
+ }
+ }
+
+ // timeout-only configurator used when SSL setup is unavailable
+ private static class BasicConfigurator implements ConnectionConfigurator {
+ @Override
+ public HttpURLConnection configure(HttpURLConnection conn)
+ throws IOException {
+ SliderURLConnectionFactory.setupConnection(conn, DEFAULT_SOCKET_TIMEOUT);
+ return conn;
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java
new file mode 100644
index 0000000..b3fdef9
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UgiJerseyBinding.java
@@ -0,0 +1,153 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.rest;
+
+import com.google.common.base.Preconditions;
+import com.sun.jersey.api.client.Client;
+import com.sun.jersey.api.client.UniformInterfaceException;
+import com.sun.jersey.api.client.config.ClientConfig;
+import com.sun.jersey.api.client.config.DefaultClientConfig;
+import com.sun.jersey.api.json.JSONConfiguration;
+import com.sun.jersey.client.urlconnection.HttpURLConnectionFactory;
+import com.sun.jersey.client.urlconnection.URLConnectionClientHandler;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+/**
+ * Class to bond to a Jersey client, for UGI integration and SPNEGO.
+ * <p>
+ * Usage: create an instance, then when creating a Jersey <code>Client</code>
+ * pass in to the constructor the handler provided by {@link #getHandler()}
+ *
+ * see <a href="https://jersey.java.net/apidocs/1.17/jersey/com/sun/jersey/client/urlconnection/HttpURLConnectionFactory.html">Jersey docs</a>
+ */
+public class UgiJerseyBinding implements
+ HttpURLConnectionFactory {
+ private static final Logger log =
+ LoggerFactory.getLogger(UgiJerseyBinding.class);
+
+ // performs the actual (optionally SPNEGO-authenticated) connection opening
+ private final UrlConnectionOperations operations;
+ // Jersey handler that routes connection creation through this instance
+ private final URLConnectionClientHandler handler;
+
+ /**
+ * Construct an instance
+ * @param operations operations instance
+ */
+ @SuppressWarnings("ThisEscapedInObjectConstruction")
+ public UgiJerseyBinding(UrlConnectionOperations operations) {
+ Preconditions.checkArgument(operations != null, "Null operations");
+ this.operations = operations;
+ handler = new URLConnectionClientHandler(this);
+ }
+
+ /**
+ * Create an instance off the configuration. The SPNEGO policy
+ * is derived from the current UGI settings.
+ * @param conf config
+ */
+ public UgiJerseyBinding(Configuration conf) {
+ this(new UrlConnectionOperations(conf));
+ }
+
+ /**
+ * Get a URL connection.
+ * @param url URL to connect to
+ * @return the connection
+ * @throws IOException any problem. {@link AuthenticationException}
+ * errors are wrapped
+ */
+ @Override
+ public HttpURLConnection getHttpURLConnection(URL url) throws IOException {
+ try {
+ // open a connection handling status codes and so redirections
+ // but as it opens a connection, it's less useful than you think.
+
+ return operations.openConnection(url);
+ } catch (AuthenticationException e) {
+ // wrap so this method honours the HttpURLConnectionFactory contract
+ throw new IOException(e);
+ }
+ }
+
+ public UrlConnectionOperations getOperations() {
+ return operations;
+ }
+
+ public URLConnectionClientHandler getHandler() {
+ return handler;
+ }
+
+ /**
+ * Get the SPNEGO flag (as found in the operations instance).
+ * @return the spnego policy
+ */
+ public boolean isUseSpnego() {
+ return operations.isUseSpnego();
+ }
+
+
+ /**
+ * Uprate error codes 400 and up into faults;
+ * <p>
+ * see {@link ExceptionConverter#convertJerseyException(String, String, UniformInterfaceException)}
+ */
+ public static IOException uprateFaults(HttpVerb verb, String url,
+ UniformInterfaceException ex)
+ throws IOException {
+ return ExceptionConverter.convertJerseyException(verb.getVerb(),
+ url, ex);
+ }
+
+ /**
+ * Create the standard Jersey client Config
+ * @return the recommended Jersey Client config
+ */
+ public ClientConfig createJerseyClientConfig() {
+ ClientConfig clientConfig = new DefaultClientConfig();
+ clientConfig.getFeatures().put(JSONConfiguration.FEATURE_POJO_MAPPING, true);
+ return clientConfig;
+ }
+
+ /**
+ * Create a jersey client bonded to this handler, using the
+ * supplied client config
+ * @param clientConfig client configuration
+ * @return a new client instance to use
+ */
+ public Client createJerseyClient(ClientConfig clientConfig) {
+ return new Client(getHandler(), clientConfig);
+ }
+
+ /**
+ * Create a jersey client bonded to this handler, using the
+ * client config created with {@link #createJerseyClientConfig()}
+ * @return a new client instance to use
+ */
+ public Client createJerseyClient() {
+ return createJerseyClient(createJerseyClientConfig());
+ }
+
+}
+
+
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java
new file mode 100644
index 0000000..d7f768e
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/rest/UrlConnectionOperations.java
@@ -0,0 +1,83 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.rest;
+
+import com.google.common.base.Preconditions;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+
+/**
+ * Operations on the JDK UrlConnection class.
+ *
+ */
+public class UrlConnectionOperations extends Configured {
+  private static final Logger log =
+      LoggerFactory.getLogger(UrlConnectionOperations.class);
+
+  /** Factory which opens (and optionally SSL-configures) connections. */
+  private final SliderURLConnectionFactory connectionFactory;
+
+  /** Whether SPNEGO authentication is applied when opening URLs. */
+  private boolean useSpnego;
+
+  /**
+   * Create an instance off the configuration. The SPNEGO policy
+   * is derived from the current UGI settings.
+   * @param conf config
+   */
+  public UrlConnectionOperations(Configuration conf) {
+    super(conf);
+    connectionFactory = SliderURLConnectionFactory.newInstance(conf);
+    if (!UserGroupInformation.isSecurityEnabled()) {
+      return;
+    }
+    log.debug("SPNEGO is enabled");
+    setUseSpnego(true);
+  }
+
+  public boolean isUseSpnego() {
+    return useSpnego;
+  }
+
+  public void setUseSpnego(boolean useSpnego) {
+    this.useSpnego = useSpnego;
+  }
+
+  /**
+   * Opens a url with cache disabled, redirect handled in
+   * (JDK) implementation.
+   *
+   * @param url to open
+   * @return URLConnection
+   * @throws IOException
+   * @throws AuthenticationException authentication failure
+   */
+  public HttpURLConnection openConnection(URL url) throws
+      IOException,
+      AuthenticationException {
+    Preconditions.checkArgument(url.getPort() != 0, "no port");
+    return (HttpURLConnection) connectionFactory.openConnection(url, useSpnego);
+  }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java
new file mode 100644
index 0000000..98a76ea
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/ServiceMonitor.java
@@ -0,0 +1,148 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.servicemonitor;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.service.AbstractService;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.service.ServiceContext;
+import org.apache.hadoop.yarn.service.component.Component;
+import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
+import org.apache.hadoop.yarn.service.conf.YarnServiceConf;
+import org.apache.hadoop.yarn.service.component.ComponentEvent;
+import org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEvent;
+import org.apache.hadoop.yarn.service.component.ComponentState;
+import org.apache.hadoop.yarn.service.servicemonitor.probe.ProbeStatus;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Map;
+import java.util.concurrent.Executors;
+import java.util.concurrent.ScheduledExecutorService;
+import java.util.concurrent.TimeUnit;
+
+import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceState.RUNNING_BUT_UNREADY;
+import static org.apache.hadoop.yarn.service.component.ComponentEventType.FLEX;
+import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEventType.BECOME_NOT_READY;
+import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceEventType.BECOME_READY;
+import static org.apache.hadoop.yarn.service.compinstance.ComponentInstanceState.READY;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.CONTAINER_FAILURE_WINDOW;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.DEFAULT_READINESS_CHECK_INTERVAL;
+import static org.apache.hadoop.yarn.service.conf.YarnServiceConf.READINESS_CHECK_INTERVAL;
+
+public class ServiceMonitor extends AbstractService {
+
+ private static final Logger LOG =
+ LoggerFactory.getLogger(ServiceMonitor.class);
+
+ public ScheduledExecutorService executorService;
+ private Map<ContainerId, ComponentInstance> liveInstances = null;
+ private ServiceContext context;
+ private Configuration conf;
+
+ public ServiceMonitor(String name, ServiceContext context) {
+ super(name);
+ liveInstances = context.scheduler.getLiveInstances();
+ this.context = context;
+ }
+
+ @Override
+ public void serviceInit(Configuration conf) throws Exception {
+ executorService = Executors.newScheduledThreadPool(1);
+ this.conf = conf;
+ super.serviceInit(conf);
+ }
+
+ @Override
+ public void serviceStart() throws Exception {
+ long readinessCheckInterval = YarnServiceConf
+ .getLong(READINESS_CHECK_INTERVAL, DEFAULT_READINESS_CHECK_INTERVAL,
+ context.application.getConfiguration(), conf);
+
+ executorService
+ .scheduleAtFixedRate(new ReadinessChecker(), readinessCheckInterval,
+ readinessCheckInterval, TimeUnit.SECONDS);
+
+ // Default 6 hours.
+ long failureResetInterval = YarnServiceConf
+ .getLong(CONTAINER_FAILURE_WINDOW, 21600,
+ context.application.getConfiguration(), conf);
+
+ executorService
+ .scheduleAtFixedRate(new ContainerFailureReset(), failureResetInterval,
+ failureResetInterval, TimeUnit.SECONDS);
+ }
+
+ @Override
+ public void serviceStop() throws Exception {
+ if (executorService != null) {
+ executorService.shutdownNow();
+ }
+ }
+
+ private class ReadinessChecker implements Runnable {
+
+ @Override
+ public void run() {
+
+ // check if the comp instance are ready
+ for (Map.Entry<ContainerId, ComponentInstance> entry : liveInstances
+ .entrySet()) {
+ ComponentInstance instance = entry.getValue();
+
+ ProbeStatus status = instance.ping();
+ if (status.isSuccess()) {
+ if (instance.getState() == RUNNING_BUT_UNREADY) {
+ // synchronously update the state.
+ instance.handle(
+ new ComponentInstanceEvent(entry.getKey(), BECOME_READY));
+ }
+ } else {
+ if (instance.getState() == READY) {
+ instance.handle(
+ new ComponentInstanceEvent(entry.getKey(), BECOME_NOT_READY));
+ }
+ }
+ }
+
+ for (Component component : context.scheduler.getAllComponents()
+ .values()) {
+ // If comp hasn't started yet and its dependencies are satisfied
+ if (component.getState() == ComponentState.INIT && component
+ .areDependenciesReady()) {
+ LOG.info("[COMPONENT {}]: Dependencies satisfied, ramping up.",
+ component.getName());
+ ComponentEvent event = new ComponentEvent(component.getName(), FLEX)
+ .setDesired(component.getComponentSpec().getNumberOfContainers());
+ component.handle(event);
+ }
+ }
+ }
+ }
+
+ private class ContainerFailureReset implements Runnable {
+ @Override
+ public void run() {
+ for (Component component : context.scheduler.getAllComponents().values()) {
+ component.resetCompFailureCount();
+ }
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java
new file mode 100644
index 0000000..10c1160
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/HttpProbe.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.servicemonitor.probe;
+
+import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
+import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.util.Map;
+
+public class HttpProbe extends Probe {
+ protected static final Logger log = LoggerFactory.getLogger(HttpProbe.class);
+
+ private static final String HOST_TOKEN = "${THIS_HOST}";
+
+ private final String urlString;
+ private final int timeout;
+ private final int min, max;
+
+
+ public HttpProbe(String url, int timeout, int min, int max, Configuration
+ conf) {
+ super("Http probe of " + url + " [" + min + "-" + max + "]", conf);
+ this.urlString = url;
+ this.timeout = timeout;
+ this.min = min;
+ this.max = max;
+ }
+
+ public static HttpProbe create(Map<String, String> props)
+ throws IOException {
+ String urlString = getProperty(props, WEB_PROBE_URL, null);
+ new URL(urlString);
+ int timeout = getPropertyInt(props, WEB_PROBE_CONNECT_TIMEOUT,
+ WEB_PROBE_CONNECT_TIMEOUT_DEFAULT);
+ int minSuccess = getPropertyInt(props, WEB_PROBE_MIN_SUCCESS,
+ WEB_PROBE_MIN_SUCCESS_DEFAULT);
+ int maxSuccess = getPropertyInt(props, WEB_PROBE_MAX_SUCCESS,
+ WEB_PROBE_MAX_SUCCESS_DEFAULT);
+ return new HttpProbe(urlString, timeout, minSuccess, maxSuccess, null);
+ }
+
+
+ private static HttpURLConnection getConnection(URL url, int timeout) throws
+ IOException {
+ HttpURLConnection connection = (HttpURLConnection) url.openConnection();
+ connection.setInstanceFollowRedirects(true);
+ connection.setConnectTimeout(timeout);
+ return connection;
+ }
+
+ @Override
+ public ProbeStatus ping(ComponentInstance instance) {
+ ProbeStatus status = new ProbeStatus();
+ ContainerStatus containerStatus = instance.getContainerStatus();
+ if (containerStatus == null || SliderUtils.isEmpty(containerStatus.getIPs())
+ || StringUtils.isEmpty(containerStatus.getHost())) {
+ status.fail(this, new IOException("IP is not available yet"));
+ return status;
+ }
+
+ String ip = containerStatus.getIPs().get(0);
+ HttpURLConnection connection = null;
+ try {
+ URL url = new URL(urlString.replace(HOST_TOKEN, ip));
+ connection = getConnection(url, this.timeout);
+ int rc = connection.getResponseCode();
+ if (rc < min || rc > max) {
+ String error = "Probe " + url + " error code: " + rc;
+ log.info(error);
+ status.fail(this,
+ new IOException(error));
+ } else {
+ status.succeed(this);
+ }
+ } catch (Throwable e) {
+ String error = "Probe " + urlString + " failed for IP " + ip + ": " + e;
+ log.info(error, e);
+ status.fail(this,
+ new IOException(error, e));
+ } finally {
+ if (connection != null) {
+ connection.disconnect();
+ }
+ }
+ return status;
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java
new file mode 100644
index 0000000..b575d69
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/LogEntryBuilder.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.servicemonitor.probe;
+
+/**
+ * Build up log entries for ease of splunk
+ */
/**
 * Accumulates comma-separated log-entry elements — plain text or
 * {@code name="value"} pairs — for splunk-friendly log lines.
 */
public class LogEntryBuilder {

  private final StringBuilder buffer = new StringBuilder();

  /** Start an empty entry. */
  public LogEntryBuilder() {
  }

  /** Start with a single text element. */
  public LogEntryBuilder(String text) {
    elt(text);
  }

  /** Start with a single {@code name="value"} element. */
  public LogEntryBuilder(String name, Object value) {
    entry(name, value);
  }

  /**
   * Append a raw text element, comma-separated from preceding elements.
   * @return this, for chaining
   */
  public LogEntryBuilder elt(String text) {
    addComma();
    buffer.append(text);
    return this;
  }

  /**
   * Append a {@code name="value"} element, comma-separated from
   * preceding elements.
   * @return this, for chaining
   */
  public LogEntryBuilder elt(String name, Object value) {
    addComma();
    entry(name, value);
    return this;
  }

  // Separator is only needed once at least one element is present.
  private void addComma() {
    if (buffer.length() > 0) {
      buffer.append(", ");
    }
  }

  // Render name=value; a null value is emitted unquoted as `null`.
  private void entry(String name, Object value) {
    if (value == null) {
      buffer.append(name).append("=null");
    } else {
      buffer.append(name).append('=')
          .append('"').append(value.toString()).append('"');
    }
  }

  @Override
  public String toString() {
    return buffer.toString();
  }

}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java
new file mode 100644
index 0000000..f5f3d99
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorKeys.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.servicemonitor.probe;
+
+/**
+ * Config keys for monitoring
+ */
/**
 * Config keys for monitoring probes.
 */
public interface MonitorKeys {

  /**
   * Port probing key : port to attempt to create a TCP connection to {@value}.
   */
  String PORT_PROBE_PORT = "port";
  /**
   * Port probing key : timeout for the connection attempt {@value}.
   */
  String PORT_PROBE_CONNECT_TIMEOUT = "timeout";
  /**
   * Port probing default : timeout for the connection attempt {@value}.
   */
  int PORT_PROBE_CONNECT_TIMEOUT_DEFAULT = 1000;

  /**
   * Web probing key : URL {@value}.
   */
  String WEB_PROBE_URL = "url";
  /**
   * Web probing key : min success code {@value}.
   */
  String WEB_PROBE_MIN_SUCCESS = "min.success";
  /**
   * Web probing key : max success code {@value}.
   */
  String WEB_PROBE_MAX_SUCCESS = "max.success";
  /**
   * Web probing default : min successful response code {@value}.
   */
  int WEB_PROBE_MIN_SUCCESS_DEFAULT = 200;
  /**
   * Web probing default : max successful response code {@value}.
   */
  int WEB_PROBE_MAX_SUCCESS_DEFAULT = 299;
  /**
   * Web probing key : timeout for the connection attempt {@value}.
   */
  String WEB_PROBE_CONNECT_TIMEOUT = "timeout";
  /**
   * Web probing default : timeout for the connection attempt {@value}.
   */
  int WEB_PROBE_CONNECT_TIMEOUT_DEFAULT = 1000;
}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java
new file mode 100644
index 0000000..46d1fdb
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/MonitorUtils.java
@@ -0,0 +1,84 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.servicemonitor.probe;
+
+import org.apache.hadoop.yarn.service.api.records.ReadinessCheck;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.util.Formatter;
+import java.util.Locale;
+
+/**
+ * Various utils to work with the monitor
+ */
+public final class MonitorUtils {
+ protected static final Logger LOG = LoggerFactory.getLogger(MonitorUtils
+ .class);
+
+ private MonitorUtils() {
+ }
+
+ public static String toPlural(int val) {
+ return val != 1 ? "s" : "";
+ }
+
+ /**
+ * Convert milliseconds to human time -the exact format is unspecified
+ * @param milliseconds a time in milliseconds
+ * @return a time that is converted to human intervals
+ */
+ public static String millisToHumanTime(long milliseconds) {
+ StringBuilder sb = new StringBuilder();
+ // Send all output to the Appendable object sb
+ Formatter formatter = new Formatter(sb, Locale.US);
+
+ long s = Math.abs(milliseconds / 1000);
+ long m = Math.abs(milliseconds % 1000);
+ if (milliseconds > 0) {
+ formatter.format("%d.%03ds", s, m);
+ } else if (milliseconds == 0) {
+ formatter.format("0");
+ } else {
+ formatter.format("-%d.%03ds", s, m);
+ }
+ return sb.toString();
+ }
+
+ public static Probe getProbe(ReadinessCheck readinessCheck) {
+ if (readinessCheck == null) {
+ return null;
+ }
+ if (readinessCheck.getType() == null) {
+ return null;
+ }
+ try {
+ switch (readinessCheck.getType()) {
+ case HTTP:
+ return HttpProbe.create(readinessCheck.getProps());
+ case PORT:
+ return PortProbe.create(readinessCheck.getProps());
+ default:
+ return null;
+ }
+ } catch (Throwable t) {
+ throw new IllegalArgumentException("Error creating readiness check " +
+ t);
+ }
+ }
+}
http://git-wip-us.apache.org/repos/asf/hadoop/blob/727e6d78/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java
new file mode 100644
index 0000000..f6cf3ae
--- /dev/null
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-slider/hadoop-yarn-slider-core/src/main/java/org/apache/hadoop/yarn/service/servicemonitor/probe/PortProbe.java
@@ -0,0 +1,98 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.service.servicemonitor.probe;
+
+import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.yarn.service.compinstance.ComponentInstance;
+import org.apache.hadoop.yarn.service.utils.SliderUtils;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.Socket;
+import java.util.Map;
+
+/**
+ * Probe for a port being open.
+ */
+public class PortProbe extends Probe {
+ protected static final Logger log = LoggerFactory.getLogger(PortProbe.class);
+ private final int port;
+ private final int timeout;
+
+ public PortProbe(int port, int timeout) {
+ super("Port probe of " + port + " for " + timeout + "ms", null);
+ this.port = port;
+ this.timeout = timeout;
+ }
+
+ public static PortProbe create(Map<String, String> props)
+ throws IOException {
+ int port = getPropertyInt(props, PORT_PROBE_PORT, null);
+
+ if (port >= 65536) {
+ throw new IOException(PORT_PROBE_PORT + " " + port + " is out of " +
+ "range");
+ }
+
+ int timeout = getPropertyInt(props, PORT_PROBE_CONNECT_TIMEOUT,
+ PORT_PROBE_CONNECT_TIMEOUT_DEFAULT);
+
+ return new PortProbe(port, timeout);
+ }
+
+ /**
+ * Try to connect to the (host,port); a failure to connect within
+ * the specified timeout is a failure.
+ * @param instance role instance
+ * @return the outcome
+ */
+ @Override
+ public ProbeStatus ping(ComponentInstance instance) {
+ ProbeStatus status = new ProbeStatus();
+
+ if (instance.getContainerStatus() == null || SliderUtils
+ .isEmpty(instance.getContainerStatus().getIPs())) {
+ status.fail(this, new IOException(
+ instance.getCompInstanceName() + ": IP is not available yet"));
+ return status;
+ }
+
+ String ip = instance.getContainerStatus().getIPs().get(0);
+ InetSocketAddress sockAddr = new InetSocketAddress(ip, port);
+ Socket socket = new Socket();
+ try {
+ if (log.isDebugEnabled()) {
+ log.debug(instance.getCompInstanceName() + ": Connecting " + sockAddr
+ .toString() + ", timeout=" + MonitorUtils
+ .millisToHumanTime(timeout));
+ }
+ socket.connect(sockAddr, timeout);
+ status.succeed(this);
+ } catch (Throwable e) {
+ String error =
+ instance.getCompInstanceName() + ": Probe " + sockAddr + " failed";
+ log.debug(error, e);
+ status.fail(this, new IOException(error, e));
+ } finally {
+ IOUtils.closeSocket(socket);
+ }
+ return status;
+ }
+}
---------------------------------------------------------------------
To unsubscribe, e-mail: common-commits-unsubscribe@hadoop.apache.org
For additional commands, e-mail: common-commits-help@hadoop.apache.org