Posted to commits@spark.apache.org by pw...@apache.org on 2013/12/14 09:42:01 UTC

[17/50] [abbrv] git commit: Merge branch 'master' into scala-2.10-wip

Merge branch 'master' into scala-2.10-wip

Conflicts:
	core/src/main/scala/org/apache/spark/rdd/RDD.scala
	project/SparkBuild.scala


Project: http://git-wip-us.apache.org/repos/asf/incubator-spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-spark/commit/44fd30d3
Tree: http://git-wip-us.apache.org/repos/asf/incubator-spark/tree/44fd30d3
Diff: http://git-wip-us.apache.org/repos/asf/incubator-spark/diff/44fd30d3

Branch: refs/heads/master
Commit: 44fd30d3fbcf830deecbe8ea3e8ea165e74e6edd
Parents: 489862a 62889c4
Author: Prashant Sharma <pr...@imaginea.com>
Authored: Mon Nov 25 18:10:54 2013 +0530
Committer: Prashant Sharma <pr...@imaginea.com>
Committed: Mon Nov 25 18:10:54 2013 +0530

----------------------------------------------------------------------
 conf/metrics.properties.template                |  8 ++
 core/pom.xml                                    |  4 +
 .../spark/metrics/sink/GraphiteSink.scala       | 82 +++++++++++++++++
 .../main/scala/org/apache/spark/rdd/RDD.scala   | 16 +++-
 .../apache/spark/rdd/ZippedPartitionsRDD.scala  | 21 +++--
 .../apache/spark/scheduler/ShuffleMapTask.scala |  9 +-
 .../spark/storage/BlockObjectWriter.scala       |  2 +
 .../org/apache/spark/util/AppendOnlyMap.scala   | 93 ++++++++++---------
 .../scala/org/apache/spark/util/Utils.scala     | 24 +++++
 .../org/apache/spark/util/XORShiftRandom.scala  | 94 ++++++++++++++++++++
 .../apache/spark/util/XORShiftRandomSuite.scala | 76 ++++++++++++++++
 docs/monitoring.md                              |  1 +
 docs/tuning.md                                  |  2 +-
 .../apache/spark/mllib/clustering/KMeans.scala  | 11 +--
 pom.xml                                         |  5 ++
 project/SparkBuild.scala                        |  3 +-
 16 files changed, 388 insertions(+), 63 deletions(-)
----------------------------------------------------------------------
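
Two of the master-side additions pulled in above are worth a note: a GraphiteSink for the metrics system (with matching conf/metrics.properties.template and docs/monitoring.md entries) and an XORShiftRandom utility (the KMeans change appears to be its first consumer, judging by the diffstat). For context, xorshift generators replace java.util.Random's synchronized linear-congruential step with a few shift/XOR operations per draw. A minimal Scala sketch of the idea only, not the merged implementation (class name, seed fallback, and shift triple here are illustrative):

    // Marsaglia-style xorshift64: three shift/XOR steps per draw.
    // Illustrates the technique behind org.apache.spark.util.XORShiftRandom;
    // this is NOT the actual merged code.
    class XorShift64(seed: Long) {
      private var x = if (seed != 0) seed else System.nanoTime()
      def nextLong(): Long = {
        x ^= (x << 21)
        x ^= (x >>> 35)
        x ^= (x << 4)
        x
      }
    }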


http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/44fd30d3/core/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/44fd30d3/core/src/main/scala/org/apache/spark/rdd/RDD.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/rdd/RDD.scala
index 3c237ca,7623c44..5f6407a
--- a/core/src/main/scala/org/apache/spark/rdd/RDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/RDD.scala
@@@ -548,20 -545,35 +548,30 @@@ abstract class RDD[T: ClassTag]
     * *same number of partitions*, but does *not* require them to have the same number
     * of elements in each partition.
     */
 -  def zipPartitions[B: ClassManifest, V: ClassManifest]
 -      (rdd2: RDD[B], preservesPartitioning: Boolean)
 -      (f: (Iterator[T], Iterator[B]) => Iterator[V]): RDD[V] =
 -    new ZippedPartitionsRDD2(sc, sc.clean(f), this, rdd2, preservesPartitioning)
 -
 -  def zipPartitions[B: ClassManifest, V: ClassManifest]
 +  def zipPartitions[B: ClassTag, V: ClassTag]
        (rdd2: RDD[B])
        (f: (Iterator[T], Iterator[B]) => Iterator[V]): RDD[V] =
-     new ZippedPartitionsRDD2(sc, sc.clean(f), this, rdd2)
+     new ZippedPartitionsRDD2(sc, sc.clean(f), this, rdd2, false)
+ 
+   def zipPartitions[B: ClassManifest, C: ClassManifest, V: ClassManifest]
+       (rdd2: RDD[B], rdd3: RDD[C], preservesPartitioning: Boolean)
+       (f: (Iterator[T], Iterator[B], Iterator[C]) => Iterator[V]): RDD[V] =
+     new ZippedPartitionsRDD3(sc, sc.clean(f), this, rdd2, rdd3, preservesPartitioning)
  
 -  def zipPartitions[B: ClassManifest, C: ClassManifest, V: ClassManifest]
 +  def zipPartitions[B: ClassTag, C: ClassTag, V: ClassTag]
        (rdd2: RDD[B], rdd3: RDD[C])
        (f: (Iterator[T], Iterator[B], Iterator[C]) => Iterator[V]): RDD[V] =
-     new ZippedPartitionsRDD3(sc, sc.clean(f), this, rdd2, rdd3)
+     new ZippedPartitionsRDD3(sc, sc.clean(f), this, rdd2, rdd3, false)
+ 
+   def zipPartitions[B: ClassManifest, C: ClassManifest, D: ClassManifest, V: ClassManifest]
+       (rdd2: RDD[B], rdd3: RDD[C], rdd4: RDD[D], preservesPartitioning: Boolean)
+       (f: (Iterator[T], Iterator[B], Iterator[C], Iterator[D]) => Iterator[V]): RDD[V] =
+     new ZippedPartitionsRDD4(sc, sc.clean(f), this, rdd2, rdd3, rdd4, preservesPartitioning)
  
 -  def zipPartitions[B: ClassManifest, C: ClassManifest, D: ClassManifest, V: ClassManifest]
 +  def zipPartitions[B: ClassTag, C: ClassTag, D: ClassTag, V: ClassTag]
        (rdd2: RDD[B], rdd3: RDD[C], rdd4: RDD[D])
        (f: (Iterator[T], Iterator[B], Iterator[C], Iterator[D]) => Iterator[V]): RDD[V] =
-     new ZippedPartitionsRDD4(sc, sc.clean(f), this, rdd2, rdd3, rdd4)
+     new ZippedPartitionsRDD4(sc, sc.clean(f), this, rdd2, rdd3, rdd4, false)
  
  
    // Actions (launch a job to return a value to the user program)
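
The RDD.scala hunk above does two things at once: it carries the scala-2.10-wip side's migration from ClassManifest context bounds to ClassTag, and it merges in master's new preservesPartitioning plumbing for the ZippedPartitionsRDD* constructors (the unflagged overloads now pass false explicitly). A minimal usage sketch of the two-RDD overload as merged, assuming a live SparkContext named sc:

    val nums  = sc.parallelize(1 to 4, 2)
    val chars = sc.parallelize(Seq("a", "b", "c", "d"), 2)

    // zipPartitions requires the *same number of partitions* (2 here);
    // per-partition element counts may differ, though these happen to match.
    val zipped = nums.zipPartitions(chars) { (is, cs) => is.zip(cs) }
    zipped.collect()   // Array((1,a), (2,b), (3,c), (4,d))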

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/44fd30d3/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
----------------------------------------------------------------------
diff --cc core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
index e02c17b,faeb316..9313bf8
--- a/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
+++ b/core/src/main/scala/org/apache/spark/rdd/ZippedPartitionsRDD.scala
@@@ -38,11 -37,15 +38,15 @@@ private[spark] class ZippedPartitionsPa
    }
  }
  
 -abstract class ZippedPartitionsBaseRDD[V: ClassManifest](
 +abstract class ZippedPartitionsBaseRDD[V: ClassTag](
      sc: SparkContext,
-     var rdds: Seq[RDD[_]])
+     var rdds: Seq[RDD[_]],
+     preservesPartitioning: Boolean = false)
    extends RDD[V](sc, rdds.map(x => new OneToOneDependency(x))) {
  
+   override val partitioner =
+     if (preservesPartitioning) firstParent[Any].partitioner else None
+ 
    override def getPartitions: Array[Partition] = {
      val sizes = rdds.map(x => x.partitions.size)
      if (!sizes.forall(x => x == sizes(0))) {
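
This partitioner override is what gives the new flag its meaning: when preservesPartitioning is set, the zipped RDD advertises its first parent's partitioner instead of None, so a downstream operation keyed the same way can skip a shuffle. A hedged sketch of the effect, using the three-RDD flagged overload visible in the RDD.scala hunk above (the data and partitioner are illustrative; pair-RDD methods needed the SparkContext._ implicits in this era's API):

    import org.apache.spark.HashPartitioner
    import org.apache.spark.SparkContext._   // pair-RDD implicits (2013-era API)

    val a = sc.parallelize(Seq(1 -> "x", 2 -> "y")).partitionBy(new HashPartitioner(4))
    val b = a.mapValues(_.toUpperCase)   // mapValues keeps a's partitioner
    val c = a.mapValues(_.length)

    // Keys and their per-partition placement are untouched below, so
    // asserting preservesPartitioning = true is safe.
    val merged = a.zipPartitions(b, c, preservesPartitioning = true) { (ia, ib, ic) =>
      ia.zip(ib).zip(ic).map { case (((k, v), (_, u)), (_, n)) => (k, (v, u, n)) }
    }
    // merged.partitioner == a.partitioner via the override above; with the
    // flag false (or the unflagged overloads) it would be None.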

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/44fd30d3/core/src/main/scala/org/apache/spark/util/Utils.scala
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/44fd30d3/pom.xml
----------------------------------------------------------------------

http://git-wip-us.apache.org/repos/asf/incubator-spark/blob/44fd30d3/project/SparkBuild.scala
----------------------------------------------------------------------
diff --cc project/SparkBuild.scala
index aa5fe70,819dfa2..3584e88
--- a/project/SparkBuild.scala
+++ b/project/SparkBuild.scala
@@@ -200,35 -202,38 +200,36 @@@ object SparkBuild extends Build 
      ),
  
      libraryDependencies ++= Seq(
 -      "com.google.guava" % "guava" % "14.0.1",
 -      "com.google.code.findbugs" % "jsr305" % "1.3.9",
 -      "log4j" % "log4j" % "1.2.17",
 -      "org.slf4j" % "slf4j-api" % slf4jVersion,
 -      "org.slf4j" % "slf4j-log4j12" % slf4jVersion,
 -      "commons-daemon" % "commons-daemon" % "1.0.10",  // workaround for bug HADOOP-9407
 -      "com.ning" % "compress-lzf" % "0.8.4",
 -      "org.xerial.snappy" % "snappy-java" % "1.0.5",
 -      "org.ow2.asm" % "asm" % "4.0",
 -      "com.google.protobuf" % "protobuf-java" % "2.4.1",
 -      "com.typesafe.akka" % "akka-actor" % "2.0.5" excludeAll(excludeNetty),
 -      "com.typesafe.akka" % "akka-remote" % "2.0.5" excludeAll(excludeNetty),
 -      "com.typesafe.akka" % "akka-slf4j" % "2.0.5" excludeAll(excludeNetty),
 -      "it.unimi.dsi" % "fastutil" % "6.4.4",
 -      "colt" % "colt" % "1.2.0",
 -      "net.liftweb" % "lift-json_2.9.2" % "2.5",
 -      "org.apache.mesos" % "mesos" % "0.13.0",
 -      "io.netty" % "netty-all" % "4.0.0.Beta2",
 -      "org.apache.derby" % "derby" % "10.4.2.0" % "test",
 -      "org.apache.hadoop" % "hadoop-client" % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm, excludeCglib),
 -      "net.java.dev.jets3t" % "jets3t" % "0.7.1",
 -      "org.apache.avro" % "avro" % "1.7.4",
 -      "org.apache.avro" % "avro-ipc" % "1.7.4" excludeAll(excludeNetty),
 -      "org.apache.zookeeper" % "zookeeper" % "3.4.5" excludeAll(excludeNetty),
 -      "com.codahale.metrics" % "metrics-core" % "3.0.0",
 -      "com.codahale.metrics" % "metrics-jvm" % "3.0.0",
 -      "com.codahale.metrics" % "metrics-json" % "3.0.0",
 -      "com.codahale.metrics" % "metrics-ganglia" % "3.0.0",
 -      "com.codahale.metrics" % "metrics-graphite" % "3.0.0",
 -      "com.twitter" % "chill_2.9.3" % "0.3.1",
 -      "com.twitter" % "chill-java" % "0.3.1"
 -    )
 +        "com.google.guava"         % "guava"            % "14.0.1",
 +        "com.google.code.findbugs" % "jsr305"           % "1.3.9",
 +        "log4j"                    % "log4j"            % "1.2.17",
 +        "org.slf4j"                % "slf4j-api"        % slf4jVersion,
 +        "org.slf4j"                % "slf4j-log4j12"    % slf4jVersion,
++        "commons-daemon"           % "commons-daemon"   % "1.0.10", // workaround for bug HADOOP-9407
 +        "com.ning"                 % "compress-lzf"     % "0.8.4",
 +        "org.xerial.snappy"        % "snappy-java"      % "1.0.5",
-         "commons-daemon"           % "commons-daemon"   % "1.0.10", // workaround for bug HADOOP-9407
 +        "org.ow2.asm"              % "asm"              % "4.0",
 +        "com.google.protobuf"      % "protobuf-java"    % "2.4.1",
 +        "com.typesafe.akka"       %% "akka-remote"      % "2.2.3"  excludeAll(excludeNetty), 
 +        "com.typesafe.akka"       %% "akka-slf4j"       % "2.2.3"  excludeAll(excludeNetty),
 +        "net.liftweb"             %% "lift-json"        % "2.5.1"  excludeAll(excludeNetty),
 +        "it.unimi.dsi"             % "fastutil"         % "6.4.4",
 +        "colt"                     % "colt"             % "1.2.0",
 +        "org.apache.mesos"         % "mesos"            % "0.13.0",
 +        "net.java.dev.jets3t"      % "jets3t"           % "0.7.1",
 +        "org.apache.derby"         % "derby"            % "10.4.2.0"                     % "test",
 +        "org.apache.hadoop"        % "hadoop-client"    % hadoopVersion excludeAll(excludeJackson, excludeNetty, excludeAsm, excludeCglib),
 +        "org.apache.avro"          % "avro"             % "1.7.4",
 +        "org.apache.avro"          % "avro-ipc"         % "1.7.4" excludeAll(excludeNetty),
 +        "org.apache.zookeeper"     % "zookeeper"        % "3.4.5" excludeAll(excludeNetty),
 +        "com.codahale.metrics"     % "metrics-core"     % "3.0.0",
 +        "com.codahale.metrics"     % "metrics-jvm"      % "3.0.0",
 +        "com.codahale.metrics"     % "metrics-json"     % "3.0.0",
 +        "com.codahale.metrics"     % "metrics-ganglia"  % "3.0.0",
++        "com.codahale.metrics"     % "metrics-graphite" % "3.0.0",
 +        "com.twitter"             %% "chill"            % "0.3.1",
 +        "com.twitter"              % "chill-java"       % "0.3.1"
 +      )
    )
  
    def rootSettings = sharedSettings ++ Seq(
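
One more detail worth flagging in the SparkBuild.scala hunk: alongside the akka 2.0.5 -> 2.2.3 and chill/lift-json upgrades, several dependencies move from a hard-coded Scala suffix ("chill_2.9.3", "lift-json_2.9.2") to sbt's %% operator, which appends the build's Scala binary version automatically. A small illustration of the difference:

    // With scalaVersion set to 2.9.3, these two resolve to the same artifact:
    "com.twitter" %  "chill_2.9.3" % "0.3.1"
    "com.twitter" %% "chill"       % "0.3.1"
    // ...but only the %% form follows the build to Scala 2.10
    // (resolving chill_2.10), which is the point of this branch.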