Posted to user@spark.apache.org by Dong Mo <mo...@gmail.com> on 2014/04/09 07:14:24 UTC
Error when compiling spark in IDEA and best practice to use IDE?
Dear list,
SBT compiles fine, but when I do the following:
sbt/sbt gen-idea
import the project as an SBT project into IDEA 13.1
Make Project
these errors show up:
Error:(28, 8) object FileContext is not a member of package
org.apache.hadoop.fs
import org.apache.hadoop.fs.{FileContext, FileStatus, FileSystem, Path,
FileUtil}
^
Error:(31, 8) object Master is not a member of package
org.apache.hadoop.mapred
import org.apache.hadoop.mapred.Master
^
Error:(34, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.api._
^
Error:(35, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment
^
Error:(36, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.api.protocolrecords._
^
Error:(37, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.api.records._
^
Error:(38, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.client.YarnClientImpl
^
Error:(39, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.conf.YarnConfiguration
^
Error:(40, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.ipc.YarnRPC
^
Error:(41, 26) object yarn is not a member of package org.apache.hadoop
import org.apache.hadoop.yarn.util.{Apps, Records}
^
Error:(49, 11) not found: type YarnClientImpl
extends YarnClientImpl with Logging {
^
Error:(48, 20) not found: type ClientArguments
class Client(args: ClientArguments, conf: Configuration, sparkConf:
SparkConf)
^
Error:(51, 18) not found: type ClientArguments
def this(args: ClientArguments, sparkConf: SparkConf) =
^
Error:(54, 18) not found: type ClientArguments
def this(args: ClientArguments) = this(args, new SparkConf())
^
Error:(56, 12) not found: type YarnRPC
var rpc: YarnRPC = YarnRPC.create(conf)
^
Error:(56, 22) not found: value YarnRPC
var rpc: YarnRPC = YarnRPC.create(conf)
^
Error:(57, 17) not found: type YarnConfiguration
val yarnConf: YarnConfiguration = new YarnConfiguration(conf)
^
Error:(57, 41) not found: type YarnConfiguration
val yarnConf: YarnConfiguration = new YarnConfiguration(conf)
^
Error:(58, 59) value getCredentials is not a member of
org.apache.hadoop.security.UserGroupInformation
val credentials = UserGroupInformation.getCurrentUser().getCredentials()
^
Error:(60, 34) not found: type ClientDistributedCacheManager
private val distCacheMgr = new ClientDistributedCacheManager()
^
Error:(72, 5) not found: value init
init(yarnConf)
^
Error:(73, 5) not found: value start
start()
^
Error:(76, 24) value getNewApplication is not a member of
org.apache.spark.Logging
val newApp = super.getNewApplication()
^
Error:(137, 35) not found: type GetNewApplicationResponse
def verifyClusterResources(app: GetNewApplicationResponse) = {
^
Error:(156, 65) not found: type ApplicationSubmissionContext
def createApplicationSubmissionContext(appId: ApplicationId):
ApplicationSubmissionContext = {
^
Error:(156, 49) not found: type ApplicationId
def createApplicationSubmissionContext(appId: ApplicationId):
ApplicationSubmissionContext = {
^
Error:(118, 31) not found: type ApplicationId
def getAppStagingDir(appId: ApplicationId): String = {
^
Error:(224, 69) not found: type LocalResource
def prepareLocalResources(appStagingDir: String): HashMap[String,
LocalResource] = {
^
Error:(307, 39) not found: type LocalResource
localResources: HashMap[String, LocalResource],
^
Error:(343, 38) not found: type ContainerLaunchContext
env: HashMap[String, String]): ContainerLaunchContext = {
^
Error:(341, 15) not found: type GetNewApplicationResponse
newApp: GetNewApplicationResponse,
^
Error:(342, 39) not found: type LocalResource
localResources: HashMap[String, LocalResource],
^
Error:(426, 11) value submitApplication is not a member of
org.apache.spark.Logging
super.submitApplication(appContext)
^
Error:(423, 29) not found: type ApplicationSubmissionContext
def submitApp(appContext: ApplicationSubmissionContext) = {
^
Error:(429, 33) not found: type ApplicationId
def monitorApplication(appId: ApplicationId): Boolean = {
^
Error:(123, 25) not found: type YarnClusterMetrics
val clusterMetrics: YarnClusterMetrics = super.getYarnClusterMetrics
^
Error:(123, 52) value getYarnClusterMetrics is not a member of
org.apache.spark.Logging
val clusterMetrics: YarnClusterMetrics = super.getYarnClusterMetrics
^
Error:(127, 20) not found: type QueueInfo
val queueInfo: QueueInfo = super.getQueueInfo(args.amQueue)
^
Error:(127, 38) value getQueueInfo is not a member of
org.apache.spark.Logging
val queueInfo: QueueInfo = super.getQueueInfo(args.amQueue)
^
Error:(158, 22) not found: value Records
val appContext =
Records.newRecord(classOf[ApplicationSubmissionContext])
^
Error:(219, 14) not found: value FileContext
val fc = FileContext.getFileContext(qualPath.toUri(), conf)
^
Error:(230, 29) not found: value Master
val delegTokenRenewer = Master.getMasterPrincipal(conf)
^
Error:(242, 13) value addDelegationTokens is not a member of
org.apache.hadoop.fs.FileSystem
dstFs.addDelegationTokens(delegTokenRenewer, credentials)
^
Error:(244, 42) not found: type LocalResource
val localResources = HashMap[String, LocalResource]()
^
Error:(302, 43) value addCredentials is not a member of
org.apache.hadoop.security.UserGroupInformation
UserGroupInformation.getCurrentUser().addCredentials(credentials)
^
Error:(323, 5) not found: value Apps
Apps.setEnvFromInputString(env, System.getenv("SPARK_YARN_USER_ENV"))
^
Error:(330, 36) not found: type ClientArguments
def userArgsToString(clientArgs: ClientArguments): String = {
^
Error:(345, 23) not found: value Records
val amContainer = Records.newRecord(classOf[ContainerLaunchContext])
^
Error:(363, 16) not found: value Environment
new Path(Environment.PWD.$(),
YarnConfiguration.DEFAULT_CONTAINER_TEMP_DIR) + " "
^
Error:(392, 21) not found: value Environment
javaCommand = Environment.JAVA_HOME.$() + "/bin/java"
^
Error:(405, 16) not found: value ApplicationConstants
" 1> " + ApplicationConstants.LOG_DIR_EXPANSION_VAR + "/stdout" +
^
Error:(410, 22) not found: value Records
val capability =
Records.newRecord(classOf[Resource]).asInstanceOf[Resource]
^
Error:(410, 72) not found: type Resource
val capability =
Records.newRecord(classOf[Resource]).asInstanceOf[Resource]
^
Error:(434, 26) value getApplicationReport is not a member of
org.apache.spark.Logging
val report = super.getApplicationReport(appId)
^
Error:(474, 20) not found: type ClientArguments
val args = new ClientArguments(argStrings, sparkConf)
^
Error:(481, 31) not found: value YarnConfiguration
for (c <-
conf.getStrings(YarnConfiguration.YARN_APPLICATION_CLASSPATH)) {
^
Error:(487, 5) not found: value Apps
Apps.addToEnvironment(env, Environment.CLASSPATH.name,
Environment.PWD.$())
^
Error:(490, 7) not found: value Apps
Apps.addToEnvironment(env, Environment.CLASSPATH.name,
Environment.PWD.$() +
^
Error:(496, 7) not found: value Apps
Apps.addToEnvironment(env, Environment.CLASSPATH.name,
Environment.PWD.$() +
^
Error:(499, 5) not found: value Apps
Apps.addToEnvironment(env, Environment.CLASSPATH.name,
Environment.PWD.$() +
^
Error:(504, 7) not found: value Apps
Apps.addToEnvironment(env, Environment.CLASSPATH.name,
Environment.PWD.$() +
^
Error:(507, 5) not found: value Apps
Apps.addToEnvironment(env, Environment.CLASSPATH.name,
Environment.PWD.$() +
^
Any idea what's causing them? Maybe I am not following the best practice
for importing Spark into an IDE.
I would appreciate any suggestions on the best way to import Spark into
any IDE.
Thank you
-Mo
Re: Error when compiling spark in IDEA and best practice to use IDE?
Posted by Dong Mo <mo...@gmail.com>.
All of these work.
Thanks
-Mo
2014-04-09 2:34 GMT-04:00 Xiangrui Meng <me...@gmail.com>:
> After sbt/sbt gen-idea, do not import as an SBT project but choose
> "open project" and point it to the spark folder. -Xiangrui
> [...]
Re: Error when compiling spark in IDEA and best practice to use IDE?
Posted by Xiangrui Meng <me...@gmail.com>.
After sbt/sbt gen-idea, do not import as an SBT project but choose
"open project" and point it to the spark folder. -Xiangrui
On Tue, Apr 8, 2014 at 10:45 PM, Sean Owen <so...@cloudera.com> wrote:
> I let IntelliJ read the Maven build directly and that works fine.
> --
> Sean Owen | Director, Data Science | London
> [...]
Re: Error when compiling spark in IDEA and best practice to use IDE?
Posted by Sean Owen <so...@cloudera.com>.
I let IntelliJ read the Maven build directly and that works fine.
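A sketch of that route (the -Pyarn profile and the hadoop.version property
are assumptions based on the Maven build instructions of this era, not
something spelled out in this thread):

  # build once with the YARN profile so the yarn module's deps resolve
  mvn -Pyarn -Dhadoop.version=2.2.0 -DskipTests package
  # then open the top-level pom.xml in IntelliJ (File > Open...) and let
  # it import the Maven modules directly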
--
Sean Owen | Director, Data Science | London
On Wed, Apr 9, 2014 at 6:14 AM, Dong Mo <mo...@gmail.com> wrote:
> [...]
Re: Error when compiling spark in IDEA and best practice to use IDE?
Posted by DB Tsai <db...@stanford.edu>.
Hi Dong,
This is pretty much what I did, and I ran into the same issue you have.
Since I'm not developing YARN-related stuff, I just excluded the two
yarn-related projects from IntelliJ, and it works. PS: you may now need
to exclude the java8 project as well.
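If you would rather have the yarn sources compile than exclude them, a
rough alternative sketch (the SPARK_YARN and SPARK_HADOOP_VERSION
environment variables are assumptions from the 0.9-era sbt build, which
only wires in the yarn project when YARN support is enabled):

  # regenerate the IDEA project files with the yarn dependencies wired in
  SPARK_HADOOP_VERSION=2.2.0 SPARK_YARN=true sbt/sbt gen-idea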
Sincerely,
DB Tsai
-------------------------------------------------------
My Blog: https://www.dbtsai.com
LinkedIn: https://www.linkedin.com/in/dbtsai
On Tue, Apr 8, 2014 at 10:14 PM, Dong Mo <mo...@gmail.com> wrote:
> [...]