You are viewing a plain text version of this content. The canonical link for it is here.
Posted to user@hadoop.apache.org by Mouzzam Hussain <mo...@gmail.com> on 2015/02/26 11:53:08 UTC

YarnClient to get the running applications list in java

 I am working with YarnClient for the 1st time. My goal is to get and
display the applications running on Yarn using Java. My project setup is as
follows:

public static void main(String[] args) throws IOException, YarnException {
    // Create yarnClient
    YarnConfiguration conf = new YarnConfiguration();
    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);

    try {
        List applications = yarnClient.getApplications();
        System.err.println("yarn client : " + applications.size());
    } catch (YarnException e) {
        e.printStackTrace();
    } catch (IOException e) {
        e.printStackTrace();
    }
}

I get the following exception when I run the program:

java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:293)
at java.lang.Thread.run(Thread.java:745)Caused by:
java.lang.NoClassDefFoundError:
org/apache/hadoop/HadoopIllegalArgumentException
at projects.HelloWorld.main(HelloWorld.java:16)... 6 moreCaused by:
java.lang.ClassNotFoundException:
org.apache.hadoop.HadoopIllegalArgumentException
at java.net.URLClassLoader$1.run(URLClassLoader.java:372)
at java.net.URLClassLoader$1.run(URLClassLoader.java:361)
at java.security.AccessController.doPrivileged(Native Method)
at java.net.URLClassLoader.findClass(URLClassLoader.java:360)
at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)

The POM file is as follows:

    <?xml version="1.0" encoding="UTF-8"?><project
xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>BigContent</groupId>
    <artifactId>ManagementServer</artifactId>
    <version>1.0-SNAPSHOT</version>


    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <hadoop.version>2.4.0</hadoop.version>
        <spark.version>1.2.1</spark.version>
    </properties>

    <build>
        <plugins>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.2</version>
                <configuration>
                    <source>1.7</source>
                    <target>1.7</target>
                </configuration>
            </plugin>

            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-war-plugin</artifactId>
                <version>2.3</version>
                <executions>
                    <execution>
                        <!-- First step is to disable the default-war
build step. -->
                        <id>default-war</id>
                        <phase>none</phase>
                    </execution>
                    <execution>
                        <!-- Second step is to create an exploded war.
Done in prepare-package -->
                        <id>war-exploded</id>
                        <phase>prepare-package</phase>
                        <goals>
                            <goal>exploded</goal>
                        </goals>
                    </execution>
                    <execution>
                        <!-- Last step is to make sure that the war is
built in the package phase -->
                        <id>custom-war</id>
                        <phase>package</phase>
                        <goals>
                            <goal>war</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <webXml>src/main/webapp/WEB-INF/web.xml</webXml>
                    <webResources>
                        <resource>
                            <!-- this is relative to the pom.xml directory -->
                            <directory>resource2</directory>
                        </resource>
                    </webResources>
                </configuration>
            </plugin>
        </plugins>
    </build>


    <dependencies>


        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-streaming_2.10</artifactId>
            <version>${spark.version}</version>
            <scope>provided</scope>
        </dependency>

        <dependency>
            <groupId>com.sun.jersey</groupId>
            <artifactId>jersey-core</artifactId>
            <version>1.9.1</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>${hadoop.version}</version>
            <exclusions>
                <exclusion>
                    <groupId>javax.servlet</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-yarn-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>


        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
            <scope>provided</scope>
            <exclusions>
                <exclusion>
                    <groupId>javax.servlet</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>com.google.guava</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <dependency>
            <groupId>json-mapreduce</groupId>
            <artifactId>json-mapreduce</artifactId>
            <version>1.0-SNAPSHOT</version>
            <exclusions>
                <exclusion>
                    <groupId>javax.servlet</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>commons-io</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>commons-lang</groupId>
                    <artifactId>*</artifactId>
                </exclusion>
                <exclusion>
                    <groupId>org.apache.hadoop</groupId>
                    <artifactId>hadoop-common</artifactId>
                </exclusion>
            </exclusions>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-core</artifactId>
            <version>0.20.2</version>
        </dependency>


        <dependency>
            <groupId>com.google.guava</groupId>
            <artifactId>guava</artifactId>
            <version>16.0</version>
        </dependency>

        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.11</version>
            <scope>test</scope>
        </dependency>

        <dependency>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
            <version>1.7.7</version>
        </dependency>

        <dependency>
            <groupId>com.fasterxml.jackson.core</groupId>
            <artifactId>jackson-databind</artifactId>
            <version>2.5.0</version>
        </dependency>



    </dependencies>

</project>

Can you please help me fix the issue, or suggest a better way?

Best Regards,
Mouzzam Hussain

RE: YarnClient to get the running applications list in java

Posted by Rohith Sharma K S <ro...@huawei.com>.
A simple way to meet your goal is to add the Hadoop jars to the project classpath — i.e., if you have the Hadoop package, extract it and add all the jars to the project classpath.

Then you change java code below

    YarnConfiguration conf = new YarnConfiguration();

    conf.set("yarn.resourcemanager.address", "rm-ip:port"); // Running RM address

    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);

    yarnClient.start(); // you need to start YarnClient service

// code to getApplications()

  }


Thanks & Regards
Rohith Sharma K S
From: Mouzzam Hussain [mailto:monibaba06@gmail.com]
Sent: 26 February 2015 16:23
To: user@hadoop.apache.org
Subject: YarnClient to get the running applications list in java


I am working with YarnClient for the 1st time. My goal is to get and display the applications running on Yarn using Java. My project setup is as follows:

public static void main(String[] args) throws IOException, YarnException {

    // Create yarnClient

    YarnConfiguration conf = new YarnConfiguration();

    YarnClient yarnClient = YarnClient.createYarnClient();

    yarnClient.init(conf);



    try {

        List applications = yarnClient.getApplications();

        System.err.println("yarn client : " + applications.size());

    } catch (YarnException e) {

        e.printStackTrace();

    } catch (IOException e) {

        e.printStackTrace();

    }



}

I get the following exception when I run the program:

java.lang.reflect.InvocationTargetException

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:483)

at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:293)

at java.lang.Thread.run(Thread.java:745)

Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/HadoopIllegalArgumentException

at projects.HelloWorld.main(HelloWorld.java:16)

... 6 more

Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.HadoopIllegalArgumentException

at java.net.URLClassLoader$1.run(URLClassLoader.java:372)

at java.net.URLClassLoader$1.run(URLClassLoader.java:361)

at java.security.AccessController.doPrivileged(Native Method)

at java.net.URLClassLoader.findClass(URLClassLoader.java:360)

at java.lang.ClassLoader.loadClass(ClassLoader.java:424)

at java.lang.ClassLoader.loadClass(ClassLoader.java:357)

The POM file is as follows:

    <?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"

         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"

         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>



    <groupId>BigContent</groupId>

    <artifactId>ManagementServer</artifactId>

    <version>1.0-SNAPSHOT</version>





    <properties>

        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

        <hadoop.version>2.4.0</hadoop.version>

        <spark.version>1.2.1</spark.version>

    </properties>



    <build>

        <plugins>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-compiler-plugin</artifactId>

                <version>3.2</version>

                <configuration>

                    <source>1.7</source>

                    <target>1.7</target>

                </configuration>

            </plugin>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-war-plugin</artifactId>

                <version>2.3</version>

                <executions>

                    <execution>

                        <!-- First step is to disable the default-war build step. -->

                        <id>default-war</id>

                        <phase>none</phase>

                    </execution>

                    <execution>

                        <!-- Second step is to create an exploded war. Done in prepare-package -->

                        <id>war-exploded</id>

                        <phase>prepare-package</phase>

                        <goals>

                            <goal>exploded</goal>

                        </goals>

                    </execution>

                    <execution>

                        <!-- Last step is to make sure that the war is built in the package phase -->

                        <id>custom-war</id>

                        <phase>package</phase>

                        <goals>

                            <goal>war</goal>

                        </goals>

                    </execution>

                </executions>

                <configuration>

                    <webXml>src/main/webapp/WEB-INF/web.xml</webXml>

                    <webResources>

                        <resource>

                            <!-- this is relative to the pom.xml directory -->

                            <directory>resource2</directory>

                        </resource>

                    </webResources>

                </configuration>

            </plugin>

        </plugins>

    </build>





    <dependencies>





        <dependency>

            <groupId>org.apache.spark</groupId>

            <artifactId>spark-streaming_2.10</artifactId>

            <version>${spark.version}</version>

            <scope>provided</scope>

        </dependency>



        <dependency>

            <groupId>com.sun.jersey</groupId>

            <artifactId>jersey-core</artifactId>

            <version>1.9.1</version>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-client</artifactId>

            <version>${hadoop.version}</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-yarn-common</artifactId>

            <version>${hadoop.version}</version>

        </dependency>





        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-common</artifactId>

            <version>${hadoop.version}</version>

            <scope>provided</scope>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>com.google.guava</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-mapreduce-client-core</artifactId>

            <version>${hadoop.version}</version>

        </dependency>



        <dependency>

            <groupId>json-mapreduce</groupId>

            <artifactId>json-mapreduce</artifactId>

            <version>1.0-SNAPSHOT</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-io</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-lang</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>org.apache.hadoop</groupId>

                    <artifactId>hadoop-common</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-core</artifactId>

            <version>0.20.2</version>

        </dependency>





        <dependency>

            <groupId>com.google.guava</groupId>

            <artifactId>guava</artifactId>

            <version>16.0</version>

        </dependency>



        <dependency>

            <groupId>junit</groupId>

            <artifactId>junit</artifactId>

            <version>4.11</version>

            <scope>test</scope>

        </dependency>



        <dependency>

            <groupId>org.slf4j</groupId>

            <artifactId>slf4j-log4j12</artifactId>

            <version>1.7.7</version>

        </dependency>



        <dependency>

            <groupId>com.fasterxml.jackson.core</groupId>

            <artifactId>jackson-databind</artifactId>

            <version>2.5.0</version>

        </dependency>







    </dependencies>





</project>

Can you please help me fix the issue, or suggest a better way?


Best Regards,
Mouzzam Hussain


RE: YarnClient to get the running applications list in java

Posted by Rohith Sharma K S <ro...@huawei.com>.
A simple way to meet your goal is to add the Hadoop jars to the project classpath — i.e., if you have the Hadoop package, extract it and add all the jars to the project classpath.

Then you change java code below

    YarnConfiguration conf = new YarnConfiguration();

    conf.set("yarn.resourcemanager.address", "rm-ip:port"); // Running RM address

    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);

    yarnClient.start(); // you need to start YarnClient service

// code to getApplications()

  }


Thanks & Regards
Rohith Sharma K S
From: Mouzzam Hussain [mailto:monibaba06@gmail.com]
Sent: 26 February 2015 16:23
To: user@hadoop.apache.org
Subject: YarnClient to get the running applications list in java


I am working with YarnClient for the 1st time. My goal is to get and display the applications running on Yarn using Java. My project setup is as follows:

public static void main(String[] args) throws IOException, YarnException {

    // Create yarnClient

    YarnConfiguration conf = new YarnConfiguration();

    YarnClient yarnClient = YarnClient.createYarnClient();

    yarnClient.init(conf);



    try {

        List applications = yarnClient.getApplications();

        System.err.println("yarn client : " + applications.size());

    } catch (YarnException e) {

        e.printStackTrace();

    } catch (IOException e) {

        e.printStackTrace();

    }



}

I get the following exception when I run the program:

java.lang.reflect.InvocationTargetException

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:483)

at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:293)

at java.lang.Thread.run(Thread.java:745)

Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/HadoopIllegalArgumentException

at projects.HelloWorld.main(HelloWorld.java:16)

... 6 more

Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.HadoopIllegalArgumentException

at java.net.URLClassLoader$1.run(URLClassLoader.java:372)

at java.net.URLClassLoader$1.run(URLClassLoader.java:361)

at java.security.AccessController.doPrivileged(Native Method)

at java.net.URLClassLoader.findClass(URLClassLoader.java:360)

at java.lang.ClassLoader.loadClass(ClassLoader.java:424)

at java.lang.ClassLoader.loadClass(ClassLoader.java:357)

The POM file is as follows:

    <?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"

         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"

         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>



    <groupId>BigContent</groupId>

    <artifactId>ManagementServer</artifactId>

    <version>1.0-SNAPSHOT</version>





    <properties>

        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

        <hadoop.version>2.4.0</hadoop.version>

        <spark.version>1.2.1</spark.version>

    </properties>



    <build>

        <plugins>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-compiler-plugin</artifactId>

                <version>3.2</version>

                <configuration>

                    <source>1.7</source>

                    <target>1.7</target>

                </configuration>

            </plugin>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-war-plugin</artifactId>

                <version>2.3</version>

                <executions>

                    <execution>

                        <!-- First step is to disable the default-war build step. -->

                        <id>default-war</id>

                        <phase>none</phase>

                    </execution>

                    <execution>

                        <!-- Second step is to create an exploded war. Done in prepare-package -->

                        <id>war-exploded</id>

                        <phase>prepare-package</phase>

                        <goals>

                            <goal>exploded</goal>

                        </goals>

                    </execution>

                    <execution>

                        <!-- Last step is to make sure that the war is built in the package phase -->

                        <id>custom-war</id>

                        <phase>package</phase>

                        <goals>

                            <goal>war</goal>

                        </goals>

                    </execution>

                </executions>

                <configuration>

                    <webXml>src/main/webapp/WEB-INF/web.xml</webXml>

                    <webResources>

                        <resource>

                            <!-- this is relative to the pom.xml directory -->

                            <directory>resource2</directory>

                        </resource>

                    </webResources>

                </configuration>

            </plugin>

        </plugins>

    </build>





    <dependencies>





        <dependency>

            <groupId>org.apache.spark</groupId>

            <artifactId>spark-streaming_2.10</artifactId>

            <version>${spark.version}</version>

            <scope>provided</scope>

        </dependency>



        <dependency>

            <groupId>com.sun.jersey</groupId>

            <artifactId>jersey-core</artifactId>

            <version>1.9.1</version>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-client</artifactId>

            <version>${hadoop.version}</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-yarn-common</artifactId>

            <version>${hadoop.version}</version>

        </dependency>





        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-common</artifactId>

            <version>${hadoop.version}</version>

            <scope>provided</scope>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>com.google.guava</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-mapreduce-client-core</artifactId>

            <version>${hadoop.version}</version>

        </dependency>



        <dependency>

            <groupId>json-mapreduce</groupId>

            <artifactId>json-mapreduce</artifactId>

            <version>1.0-SNAPSHOT</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-io</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-lang</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>org.apache.hadoop</groupId>

                    <artifactId>hadoop-common</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-core</artifactId>

            <version>0.20.2</version>

        </dependency>





        <dependency>

            <groupId>com.google.guava</groupId>

            <artifactId>guava</artifactId>

            <version>16.0</version>

        </dependency>



        <dependency>

            <groupId>junit</groupId>

            <artifactId>junit</artifactId>

            <version>4.11</version>

            <scope>test</scope>

        </dependency>



        <dependency>

            <groupId>org.slf4j</groupId>

            <artifactId>slf4j-log4j12</artifactId>

            <version>1.7.7</version>

        </dependency>



        <dependency>

            <groupId>com.fasterxml.jackson.core</groupId>

            <artifactId>jackson-databind</artifactId>

            <version>2.5.0</version>

        </dependency>







    </dependencies>





</project>

Can you please help me fix the issue, or suggest a better way?


Best Regards,
Mouzzam Hussain


RE: YarnClient to get the running applications list in java

Posted by Rohith Sharma K S <ro...@huawei.com>.
A simple way to meet your goal is to add the Hadoop jars to the project classpath — i.e., if you have the Hadoop package, extract it and add all the jars to the project classpath.

Then you change java code below

    YarnConfiguration conf = new YarnConfiguration();

    conf.set("yarn.resourcemanager.address", "rm-ip:port"); // Running RM address

    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);

    yarnClient.start(); // you need to start YarnClient service

// code to getApplications()

  }


Thanks & Regards
Rohith Sharma K S
From: Mouzzam Hussain [mailto:monibaba06@gmail.com]
Sent: 26 February 2015 16:23
To: user@hadoop.apache.org
Subject: YarnClient to get the running applications list in java


I am working with YarnClient for the 1st time. My goal is to get and display the applications running on Yarn using Java. My project setup is as follows:

public static void main(String[] args) throws IOException, YarnException {

    // Create yarnClient

    YarnConfiguration conf = new YarnConfiguration();

    YarnClient yarnClient = YarnClient.createYarnClient();

    yarnClient.init(conf);



    try {

        List applications = yarnClient.getApplications();

        System.err.println("yarn client : " + applications.size());

    } catch (YarnException e) {

        e.printStackTrace();

    } catch (IOException e) {

        e.printStackTrace();

    }



}

I get the following exception when I run the program:

java.lang.reflect.InvocationTargetException

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:483)

at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:293)

at java.lang.Thread.run(Thread.java:745)

Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/HadoopIllegalArgumentException

at projects.HelloWorld.main(HelloWorld.java:16)

... 6 more

Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.HadoopIllegalArgumentException

at java.net.URLClassLoader$1.run(URLClassLoader.java:372)

at java.net.URLClassLoader$1.run(URLClassLoader.java:361)

at java.security.AccessController.doPrivileged(Native Method)

at java.net.URLClassLoader.findClass(URLClassLoader.java:360)

at java.lang.ClassLoader.loadClass(ClassLoader.java:424)

at java.lang.ClassLoader.loadClass(ClassLoader.java:357)

The POM file is as follows:

    <?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"

         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"

         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>



    <groupId>BigContent</groupId>

    <artifactId>ManagementServer</artifactId>

    <version>1.0-SNAPSHOT</version>





    <properties>

        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

        <hadoop.version>2.4.0</hadoop.version>

        <spark.version>1.2.1</spark.version>

    </properties>



    <build>

        <plugins>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-compiler-plugin</artifactId>

                <version>3.2</version>

                <configuration>

                    <source>1.7</source>

                    <target>1.7</target>

                </configuration>

            </plugin>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-war-plugin</artifactId>

                <version>2.3</version>

                <executions>

                    <execution>

                        <!-- First step is to disable the default-war build step. -->

                        <id>default-war</id>

                        <phase>none</phase>

                    </execution>

                    <execution>

                        <!-- Second step is to create an exploded war. Done in prepare-package -->

                        <id>war-exploded</id>

                        <phase>prepare-package</phase>

                        <goals>

                            <goal>exploded</goal>

                        </goals>

                    </execution>

                    <execution>

                        <!-- Last step is to make sure that the war is built in the package phase -->

                        <id>custom-war</id>

                        <phase>package</phase>

                        <goals>

                            <goal>war</goal>

                        </goals>

                    </execution>

                </executions>

                <configuration>

                    <webXml>src/main/webapp/WEB-INF/web.xml</webXml>

                    <webResources>

                        <resource>

                            <!-- this is relative to the pom.xml directory -->

                            <directory>resource2</directory>

                        </resource>

                    </webResources>

                </configuration>

            </plugin>

        </plugins>

    </build>





    <dependencies>





        <dependency>

            <groupId>org.apache.spark</groupId>

            <artifactId>spark-streaming_2.10</artifactId>

            <version>${spark.version}</version>

            <scope>provided</scope>

        </dependency>



        <dependency>

            <groupId>com.sun.jersey</groupId>

            <artifactId>jersey-core</artifactId>

            <version>1.9.1</version>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-client</artifactId>

            <version>${hadoop.version}</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-yarn-common</artifactId>

            <version>${hadoop.version}</version>

        </dependency>





        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-common</artifactId>

            <version>${hadoop.version}</version>

            <scope>provided</scope>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>com.google.guava</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-mapreduce-client-core</artifactId>

            <version>${hadoop.version}</version>

        </dependency>



        <dependency>

            <groupId>json-mapreduce</groupId>

            <artifactId>json-mapreduce</artifactId>

            <version>1.0-SNAPSHOT</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-io</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-lang</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>org.apache.hadoop</groupId>

                    <artifactId>hadoop-common</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-core</artifactId>

            <version>0.20.2</version>

        </dependency>





        <dependency>

            <groupId>com.google.guava</groupId>

            <artifactId>guava</artifactId>

            <version>16.0</version>

        </dependency>



        <dependency>

            <groupId>junit</groupId>

            <artifactId>junit</artifactId>

            <version>4.11</version>

            <scope>test</scope>

        </dependency>



        <dependency>

            <groupId>org.slf4j</groupId>

            <artifactId>slf4j-log4j12</artifactId>

            <version>1.7.7</version>

        </dependency>



        <dependency>

            <groupId>com.fasterxml.jackson.core</groupId>

            <artifactId>jackson-databind</artifactId>

            <version>2.5.0</version>

        </dependency>







    </dependencies>





</project>

Can you please help me in fixing the issue, or if you can suggest a better way?


Best Regards,
Mouzzam Hussain


RE: YarnClient to get the running applications list in java

Posted by Rohith Sharma K S <ro...@huawei.com>.
A simple way to meet your goal is to add the Hadoop jars to your project classpath; i.e., if you have a Hadoop package, extract it and add all of its jars to the project classpath.

Then change your Java code as shown below:

    YarnConfiguration conf = new YarnConfiguration();

    conf.set("yarn.resourcemanager.address", "rm-ip:port"); // Running RM address

    YarnClient yarnClient = YarnClient.createYarnClient();
    yarnClient.init(conf);

    yarnClient.start(); // you need to start YarnClient service

// code to getApplications()

  }


Thanks & Regards
Rohith Sharma K S
From: Mouzzam Hussain [mailto:monibaba06@gmail.com]
Sent: 26 February 2015 16:23
To: user@hadoop.apache.org
Subject: YarnClient to get the running applications list in java


I am working with YarnClient for the 1st time. My goal is to get and display the applications running on Yarn using Java. My project setup is as follows:

public static void main(String[] args) throws IOException, YarnException {

    // Create yarnClient

    YarnConfiguration conf = new YarnConfiguration();

    YarnClient yarnClient = YarnClient.createYarnClient();

    yarnClient.init(conf);



    try {

        List applications = yarnClient.getApplications();

        System.err.println("yarn client : " + applications.size());

    } catch (YarnException e) {

        e.printStackTrace();

    } catch (IOException e) {

        e.printStackTrace();

    }



}

I get the following exception when I run the program:

java.lang.reflect.InvocationTargetException

at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)

at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)

at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)

at java.lang.reflect.Method.invoke(Method.java:483)

at org.codehaus.mojo.exec.ExecJavaMojo$1.run(ExecJavaMojo.java:293)

at java.lang.Thread.run(Thread.java:745)

Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/HadoopIllegalArgumentException

at projects.HelloWorld.main(HelloWorld.java:16)

... 6 more

Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.HadoopIllegalArgumentException

at java.net.URLClassLoader$1.run(URLClassLoader.java:372)

at java.net.URLClassLoader$1.run(URLClassLoader.java:361)

at java.security.AccessController.doPrivileged(Native Method)

at java.net.URLClassLoader.findClass(URLClassLoader.java:360)

at java.lang.ClassLoader.loadClass(ClassLoader.java:424)

at java.lang.ClassLoader.loadClass(ClassLoader.java:357)

The POM file is as follows:

    <?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0"

         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"

         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">

    <modelVersion>4.0.0</modelVersion>



    <groupId>BigContent</groupId>

    <artifactId>ManagementServer</artifactId>

    <version>1.0-SNAPSHOT</version>





    <properties>

        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

        <hadoop.version>2.4.0</hadoop.version>

        <spark.version>1.2.1</spark.version>

    </properties>



    <build>

        <plugins>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-compiler-plugin</artifactId>

                <version>3.2</version>

                <configuration>

                    <source>1.7</source>

                    <target>1.7</target>

                </configuration>

            </plugin>



            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-war-plugin</artifactId>

                <version>2.3</version>

                <executions>

                    <execution>

                        <!-- First step is to disable the default-war build step. -->

                        <id>default-war</id>

                        <phase>none</phase>

                    </execution>

                    <execution>

                        <!-- Second step is to create an exploded war. Done in prepare-package -->

                        <id>war-exploded</id>

                        <phase>prepare-package</phase>

                        <goals>

                            <goal>exploded</goal>

                        </goals>

                    </execution>

                    <execution>

                        <!-- Last step is to make sure that the war is built in the package phase -->

                        <id>custom-war</id>

                        <phase>package</phase>

                        <goals>

                            <goal>war</goal>

                        </goals>

                    </execution>

                </executions>

                <configuration>

                    <webXml>src/main/webapp/WEB-INF/web.xml</webXml>

                    <webResources>

                        <resource>

                            <!-- this is relative to the pom.xml directory -->

                            <directory>resource2</directory>

                        </resource>

                    </webResources>

                </configuration>

            </plugin>

        </plugins>

    </build>





    <dependencies>





        <dependency>

            <groupId>org.apache.spark</groupId>

            <artifactId>spark-streaming_2.10</artifactId>

            <version>${spark.version}</version>

            <scope>provided</scope>

        </dependency>



        <dependency>

            <groupId>com.sun.jersey</groupId>

            <artifactId>jersey-core</artifactId>

            <version>1.9.1</version>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-client</artifactId>

            <version>${hadoop.version}</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-yarn-common</artifactId>

            <version>${hadoop.version}</version>

        </dependency>





        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-common</artifactId>

            <version>${hadoop.version}</version>

            <scope>provided</scope>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>com.google.guava</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-mapreduce-client-core</artifactId>

            <version>${hadoop.version}</version>

        </dependency>



        <dependency>

            <groupId>json-mapreduce</groupId>

            <artifactId>json-mapreduce</artifactId>

            <version>1.0-SNAPSHOT</version>

            <exclusions>

                <exclusion>

                    <groupId>javax.servlet</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-io</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>commons-lang</groupId>

                    <artifactId>*</artifactId>

                </exclusion>

                <exclusion>

                    <groupId>org.apache.hadoop</groupId>

                    <artifactId>hadoop-common</artifactId>

                </exclusion>

            </exclusions>

        </dependency>



        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-core</artifactId>

            <version>0.20.2</version>

        </dependency>





        <dependency>

            <groupId>com.google.guava</groupId>

            <artifactId>guava</artifactId>

            <version>16.0</version>

        </dependency>



        <dependency>

            <groupId>junit</groupId>

            <artifactId>junit</artifactId>

            <version>4.11</version>

            <scope>test</scope>

        </dependency>



        <dependency>

            <groupId>org.slf4j</groupId>

            <artifactId>slf4j-log4j12</artifactId>

            <version>1.7.7</version>

        </dependency>



        <dependency>

            <groupId>com.fasterxml.jackson.core</groupId>

            <artifactId>jackson-databind</artifactId>

            <version>2.5.0</version>

        </dependency>







    </dependencies>





</project>

Can you please help me in fixing the issue, or if you can suggest a better way?


Best Regards,
Mouzzam Hussain