Posted to common-issues@hadoop.apache.org by "K0K0V0K (via GitHub)" <gi...@apache.org> on 2023/06/19 09:03:00 UTC

[GitHub] [hadoop] K0K0V0K commented on a diff in pull request #5745: Yarn 11511 - Improve TestRMWebServices test config and data

K0K0V0K commented on code in PR #5745:
URL: https://github.com/apache/hadoop/pull/5745#discussion_r1233748847


##########
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServicesCapacitySched.java:
##########
@@ -18,274 +18,122 @@
 
 package org.apache.hadoop.yarn.server.resourcemanager.webapp;
 
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.ObjectMapper;
+import javax.ws.rs.core.MediaType;
+
 import com.google.inject.Guice;
-import com.google.inject.servlet.ServletModule;
 import com.sun.jersey.api.client.ClientResponse;
-import com.sun.jersey.api.client.WebResource;
-import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
-import com.sun.jersey.test.framework.WebAppDescriptor;
-
-import java.io.BufferedReader;
-import java.io.FileWriter;
-import java.io.IOException;
-import java.io.InputStream;
-import java.io.InputStreamReader;
-import java.io.StringReader;
-import java.io.StringWriter;
-import java.net.URISyntaxException;
-import java.util.Objects;
-import java.util.stream.Collectors;
-
-import javax.ws.rs.core.MediaType;
-import javax.xml.parsers.DocumentBuilder;
-import javax.xml.parsers.DocumentBuilderFactory;
-import javax.xml.transform.OutputKeys;
-import javax.xml.transform.Transformer;
-import javax.xml.transform.TransformerException;
-import javax.xml.transform.TransformerFactory;
-import javax.xml.transform.dom.DOMSource;
-import javax.xml.transform.stream.StreamResult;
+import org.codehaus.jettison.json.JSONObject;
+import org.junit.After;
+import org.junit.Test;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.http.JettyUtils;
-import org.apache.hadoop.util.XMLUtils;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.resourcemanager.MockRM;
 import org.apache.hadoop.yarn.server.resourcemanager.MockRMAppSubmissionData;
 import org.apache.hadoop.yarn.server.resourcemanager.MockRMAppSubmitter;
-import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
 import org.apache.hadoop.yarn.util.resource.Resources;
-import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.GuiceServletConfig;
 import org.apache.hadoop.yarn.webapp.JerseyTestBase;
-import org.codehaus.jettison.json.JSONException;
-import org.codehaus.jettison.json.JSONObject;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
-import org.w3c.dom.Document;
-import org.xml.sax.InputSource;
 
-import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration.AUTO_CREATED_LEAF_QUEUE_TEMPLATE_PREFIX;
-import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration.CAPACITY;
-import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration.DOT;
-import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration.MAX_PARALLEL_APPLICATIONS;
-import static org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration.PREFIX;
+import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.WebServletModule;
+import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.assertJsonResponse;
+import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.assertJsonType;
+import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.assertXmlResponse;
+import static org.apache.hadoop.yarn.server.resourcemanager.webapp.TestWebServiceUtil.createWebAppDescriptor;
 import static org.junit.Assert.assertEquals;
 
 public class TestRMWebServicesCapacitySched extends JerseyTestBase {
 
   private MockRM rm;
 
-  public static class WebServletModule extends ServletModule {
-    private final MockRM rm;
-
-    WebServletModule(MockRM rm) {
-      this.rm = rm;
-    }
-
-    @Override
-    protected void configureServlets() {
-      bind(JAXBContextResolver.class);
-      bind(RMWebServices.class);
-      bind(GenericExceptionHandler.class);
-      bind(ResourceManager.class).toInstance(rm);
-      serve("/*").with(GuiceContainer.class);
-    }
-  }
-
   public TestRMWebServicesCapacitySched() {
     super(createWebAppDescriptor());
   }
 
-  @Before
-  @Override
-  public void setUp() throws Exception {
-    super.setUp();
-    rm = createMockRM(new CapacitySchedulerConfiguration(
-        new Configuration(false)));
-    GuiceServletConfig.setInjector(
-        Guice.createInjector(new WebServletModule(rm)));
-  }
-
-  public static void setupQueueConfiguration(
-      CapacitySchedulerConfiguration config) {
-
-    // Define top-level queues
-    config.setQueues(CapacitySchedulerConfiguration.ROOT,
-        new String[] {"a", "b", "c"});
-
-    final String a = CapacitySchedulerConfiguration.ROOT + ".a";
-    config.setCapacity(a, 10.5f);
-    config.setMaximumCapacity(a, 50);
-    config.setInt(CapacitySchedulerConfiguration.getQueuePrefix(a) + MAX_PARALLEL_APPLICATIONS, 42);
-
-    final String b = CapacitySchedulerConfiguration.ROOT + ".b";
-    config.setCapacity(b, 89.5f);
-
-    final String c = CapacitySchedulerConfiguration.ROOT + ".c";
-    config.setCapacity(c, "[memory=1024]");
-
-    // Define 2nd-level queues
-    final String a1 = a + ".a1";
-    final String a2 = a + ".a2";
-    config.setQueues(a, new String[] {"a1", "a2"});
-    config.setCapacity(a1, 30);
-    config.setMaximumCapacity(a1, 50);
-    config.setMaximumLifetimePerQueue(a2, 100);
-    config.setDefaultLifetimePerQueue(a2, 50);
-
-    config.setUserLimitFactor(a1, 100.0f);
-    config.setCapacity(a2, 70);
-    config.setUserLimitFactor(a2, 100.0f);
-
-    final String b1 = b + ".b1";
-    final String b2 = b + ".b2";
-    final String b3 = b + ".b3";
-    config.setQueues(b, new String[] {"b1", "b2", "b3"});
-    config.setCapacity(b1, 60);
-    config.setUserLimitFactor(b1, 100.0f);
-    config.setCapacity(b2, 39.5f);
-    config.setUserLimitFactor(b2, 100.0f);
-    config.setCapacity(b3, 0.5f);
-    config.setUserLimitFactor(b3, 100.0f);
-
-    config.setQueues(a1, new String[] {"a1a", "a1b", "a1c"});
-    final String a1A = a1 + ".a1a";
-    config.setCapacity(a1A, 65);
-    final String a1B = a1 + ".a1b";
-    config.setCapacity(a1B, 15);
-    final String a1C = a1 + ".a1c";
-    config.setCapacity(a1C, 20);
-
-    config.setAutoCreateChildQueueEnabled(a1C, true);
-    config.setInt(PREFIX + a1C + DOT + AUTO_CREATED_LEAF_QUEUE_TEMPLATE_PREFIX
-        + DOT + CAPACITY, 50);
+  @After
+  public void shoutDown(){

Review Comment:
   Thanks, fixed.
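
   For reference, a teardown along these lines is the usual pattern in these MockRM-based web service tests (the diff above already adds the org.junit.After import and keeps the rm field). This is only an illustrative sketch of what such an @After method typically looks like; the method name and null check are assumptions, not the exact code from the PR:

       // Illustrative sketch only: stop the MockRM after each test so the
       // next test starts from a clean state. Relies on the "rm" field and
       // the org.junit.After import shown in the diff above.
       @After
       public void shutDown() {
         if (rm != null) {
           rm.stop();
         }
       }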



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: common-issues-unsubscribe@hadoop.apache.org

For queries about this service, please contact Infrastructure at:
users@infra.apache.org

