Posted to user@spark.apache.org by PS...@in.imshealth.com on 2017/04/06 08:03:44 UTC

scala test is unable to initialize spark context.

Hi All,

   I am trying to use ScalaTest to test a small piece of Spark code, but the SparkContext is not getting initialized when I run the test file.
I have included the code, the POM, and the exception I am getting below. Please help me understand what mistake I am making that prevents the SparkContext from initializing.

Code:-

import org.apache.log4j.LogManager
import org.apache.spark.SharedSparkContext
import org.scalatest.FunSuite
import org.apache.spark.{SparkContext, SparkConf}

/**
 * Created by PSwain on 4/5/2017.
 */
class Test extends FunSuite with SharedSparkContext  {


  test("test initializing spark context") {
    val list = List(1, 2, 3, 4)
    val rdd = sc.parallelize(list)
    assert(list.length === rdd.count())
  }
}
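
For reference, an equivalent self-contained version of the same test, managing its own local SparkContext instead of mixing in SharedSparkContext from the spark-core test-jar, would look roughly like this (suite name, app name, and master setting are just placeholders; it will still hit the same log4j failure if the classpath configuration is the problem, but it takes the test-jar helper out of the picture):

import org.apache.spark.{SparkConf, SparkContext}
import org.scalatest.{BeforeAndAfterAll, FunSuite}

class LocalContextTest extends FunSuite with BeforeAndAfterAll {

  private var sc: SparkContext = _

  override def beforeAll(): Unit = {
    // Local master with two threads; no cluster needed for this test.
    val conf = new SparkConf().setMaster("local[2]").setAppName("local-context-test")
    sc = new SparkContext(conf)
  }

  override def afterAll(): Unit = {
    if (sc != null) sc.stop()   // release the context after the suite finishes
  }

  test("parallelize a small list") {
    val list = List(1, 2, 3, 4)
    val rdd = sc.parallelize(list)
    assert(list.length === rdd.count())
  }
}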

POM File:-


<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>tesing.loging</groupId>
    <artifactId>logging</artifactId>
    <version>1.0-SNAPSHOT</version>


    <repositories>
        <repository>
            <id>central</id>
            <name>central</name>
            <url>http://repo1.maven.org/maven/</url>
        </repository>
    </repositories>

    <dependencies>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.10</artifactId>
            <version>1.6.0</version>
            <type>test-jar</type>


        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.10</artifactId>
            <version>1.6.0</version>
        </dependency>

        <dependency>
            <groupId>org.scalatest</groupId>
            <artifactId>scalatest_2.10</artifactId>
            <version>2.2.6</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.10</artifactId>
            <version>1.5.0</version>
            <scope>provided</scope>
        </dependency>
        <dependency>
            <groupId>com.databricks</groupId>
            <artifactId>spark-csv_2.10</artifactId>
            <version>1.3.0</version>
        </dependency>
        <dependency>
            <groupId>com.rxcorp.bdf.logging</groupId>
            <artifactId>loggingframework</artifactId>
            <version>1.0-SNAPSHOT</version>
        </dependency>
        <dependency>
            <groupId>mysql</groupId>
            <artifactId>mysql-connector-java</artifactId>
            <version>5.1.6</version>
            <scope>provided</scope>
        </dependency>
        <!-- Scala -->
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.10.5</version>
            <scope>compile</scope>
            <optional>true</optional>
        </dependency>

        <dependency>
            <groupId>org.scalatest</groupId>
            <artifactId>scalatest</artifactId>
            <version>1.4.RC2</version>
        </dependency>

        <dependency>
            <groupId>log4j</groupId>
            <artifactId>log4j</artifactId>
            <version>1.2.17</version>
        </dependency>

        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-compiler</artifactId>
            <version>2.10.5</version>
            <scope>compile</scope>
            <optional>true</optional>
        </dependency>
        <!-- END Scala -->
    </dependencies>
    <build>
        <sourceDirectory>src/main/scala</sourceDirectory>
        <plugins>
            <plugin>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.2.1</version>
                <configuration>
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
            <plugin>
                <groupId>net.alchim31.maven</groupId>
                <artifactId>scala-maven-plugin</artifactId>
                <version>3.2.0</version>
                <executions>
                    <execution>
                        <goals>
                            <goal>compile</goal>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <sourceDir>src/main/scala</sourceDir>

                    <jvmArgs>
                        <jvmArg>-Xms64m</jvmArg>
                        <jvmArg>-Xmx1024m</jvmArg>
                    </jvmArgs>
                </configuration>
            </plugin>
        </plugins>
    </build>

</project>





Exception:-



An exception or error caused a run to abort.

java.lang.ExceptionInInitializerError
        at org.apache.spark.Logging$class.initializeLogging(Logging.scala:121)
        at org.apache.spark.Logging$class.initializeIfNecessary(Logging.scala:106)
        at org.apache.spark.Logging$class.log(Logging.scala:50)
        at org.apache.spark.SparkContext.log(SparkContext.scala:79)
        at org.apache.spark.Logging$class.logInfo(Logging.scala:58)
        at org.apache.spark.SparkContext.logInfo(SparkContext.scala:79)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:211)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:147)
        at org.apache.spark.SharedSparkContext$class.beforeAll(SharedSparkContext.scala:33)
        at Test.beforeAll(Test.scala:10)
        at org.scalatest.BeforeAndAfterAll$class.beforeAll(BeforeAndAfterAll.scala:187)
        at Test.beforeAll(Test.scala:10)
        at org.scalatest.BeforeAndAfterAll$class.run(BeforeAndAfterAll.scala:253)
        at Test.run(Test.scala:10)
        at org.scalatest.tools.SuiteRunner.run(SuiteRunner.scala:55)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2563)
        at org.scalatest.tools.Runner$$anonfun$doRunRunRunDaDoRunRun$3.apply(Runner.scala:2557)
        at scala.collection.immutable.List.foreach(List.scala:318)
        at org.scalatest.tools.Runner$.doRunRunRunDaDoRunRun(Runner.scala:2557)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1044)
        at org.scalatest.tools.Runner$$anonfun$runOptionallyWithPassFailReporter$2.apply(Runner.scala:1043)
        at org.scalatest.tools.Runner$.withClassLoaderAndDispatchReporter(Runner.scala:2722)
        at org.scalatest.tools.Runner$.runOptionallyWithPassFailReporter(Runner.scala:1043)
        at org.scalatest.tools.Runner$.run(Runner.scala:883)
        at org.scalatest.tools.Runner.run(Runner.scala)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.runScalaTest2(ScalaTestRunner.java:138)
        at org.jetbrains.plugins.scala.testingSupport.scalaTest.ScalaTestRunner.main(ScalaTestRunner.java:28)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:498)
        at com.intellij.rt.execution.application.AppMain.main(AppMain.java:144)
Caused by: java.lang.IllegalStateException: FileNamePattern [-.log] does not contain a valid date format specifier
        at org.apache.log4j.rolling.TimeBasedRollingPolicy.activateOptions(TimeBasedRollingPolicy.java:172)
        at org.apache.log4j.config.PropertySetter.activate(PropertySetter.java:307)
        at org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:172)
        at org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:149)
        at org.apache.log4j.config.PropertySetter.setProperties(PropertySetter.java:104)
        at org.apache.log4j.PropertyConfigurator.parseAppender(PropertyConfigurator.java:842)
        at org.apache.log4j.PropertyConfigurator.parseCategory(PropertyConfigurator.java:768)
        at org.apache.log4j.PropertyConfigurator.configureRootCategory(PropertyConfigurator.java:648)
        at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:514)
        at org.apache.log4j.PropertyConfigurator.doConfigure(PropertyConfigurator.java:580)
        at org.apache.log4j.helpers.OptionConverter.selectAndConfigure(OptionConverter.java:526)
        at org.apache.log4j.LogManager.<clinit>(LogManager.java:127)
        ... 32 more





Process finished with exit code 0



RE: scala test is unable to initialize spark context.

Posted by PS...@in.imshealth.com.
I am nowhere using log4j in my code ☹

From: Jeff Zhang [mailto:zjffdu@gmail.com]
Sent: Thursday, April 06, 2017 1:40 PM
To: Priyaranjan Swain <PS...@in.imshealth.com>; user@spark.apache.org
Subject: Re: scala test is unable to initialize spark context.


Seems it is caused by your log4j file


Caused by: java.lang.IllegalStateException: FileNamePattern [-.log] does not contain a valid date format specifier






Re: scala test is unable to initialize spark context.

Posted by Jeff Zhang <zj...@gmail.com>.
Seems it is caused by your log4j file

Caused by: java.lang.IllegalStateException: FileNamePattern [-.log] does not contain a valid date format specifier
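
Even though the test never calls log4j directly (the LogManager import is unused), Spark's Logging trait initializes log4j when the SparkContext is constructed, and log4j then loads whatever log4j.properties it finds first on the test classpath; presumably the broken FileNamePattern comes from one of the dependencies (perhaps the loggingframework artifact) rather than from the test itself. Assuming that is the case, one workaround is to disable log4j's automatic configuration and install a plain console appender before the shared SparkContext is created. This is only a sketch using standard log4j 1.x calls; the chosen log level is arbitrary:

import org.apache.log4j.{BasicConfigurator, Level, Logger}
import org.apache.spark.SharedSparkContext
import org.scalatest.FunSuite

class Test extends FunSuite with SharedSparkContext {

  override def beforeAll(): Unit = {
    // Tell log4j to skip its automatic configuration (which is what fails
    // on the invalid FileNamePattern) before any log4j class is loaded.
    System.setProperty("log4j.defaultInitOverride", "true")
    BasicConfigurator.configure()                // simple console appender
    Logger.getRootLogger.setLevel(Level.WARN)    // keep test output quiet
    super.beforeAll()                            // SharedSparkContext creates sc here
  }

  test("test initializing spark context") {
    val list = List(1, 2, 3, 4)
    val rdd = sc.parallelize(list)
    assert(list.length === rdd.count())
  }
}

The simpler fix would be to correct, or shadow with a valid file under src/test/resources, whichever log4j.properties defines the rolling appender with FileNamePattern [-.log].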



