Posted to commits@spark.apache.org by td...@apache.org on 2014/03/19 06:09:46 UTC

git commit: [SPARK-1274] Add dev scripts to merge PRs and create releases from master to branch-0.9

Repository: spark
Updated Branches:
  refs/heads/branch-0.9 0183ddda1 -> 7ec78bc54


[SPARK-1274] Add dev scripts to merge PRs and create releases from master to branch-0.9

All the files are copied one-to-one from master. Only the Spark version numbers were changed.

Author: Tathagata Das <ta...@gmail.com>

Closes #176 from tdas/branch-0.9 and squashes the following commits:

fb1b913 [Tathagata Das] Changed version numbers for 0.9.1 release.
3303d5e [Tathagata Das] Copied Apache release scripts in master to branch-0.9


Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/7ec78bc5
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/7ec78bc5
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/7ec78bc5

Branch: refs/heads/branch-0.9
Commit: 7ec78bc54f66ea2a85b92d4421566b654e5076c0
Parents: 0183ddd
Author: Tathagata Das <ta...@gmail.com>
Authored: Tue Mar 18 22:09:16 2014 -0700
Committer: Tathagata Das <ta...@gmail.com>
Committed: Tue Mar 18 22:09:16 2014 -0700

----------------------------------------------------------------------
 dev/audit-release/.gitignore                    |   2 +
 dev/audit-release/README.md                     |  11 +
 dev/audit-release/audit_release.py              | 223 +++++++++++++++++++
 dev/audit-release/blank_maven_build/pom.xml     |  47 ++++
 dev/audit-release/blank_sbt_build/build.sbt     |  29 +++
 dev/audit-release/maven_app_core/input.txt      |   8 +
 dev/audit-release/maven_app_core/pom.xml        |  56 +++++
 .../maven_app_core/src/main/java/SimpleApp.java |  41 ++++
 dev/audit-release/sbt_app_core/build.sbt        |  29 +++
 dev/audit-release/sbt_app_core/input.txt        |   8 +
 .../sbt_app_core/src/main/scala/SparkApp.scala  |  49 ++++
 dev/audit-release/sbt_app_ganglia/build.sbt     |  31 +++
 .../src/main/scala/SparkApp.scala               |  39 ++++
 dev/audit-release/sbt_app_graphx/build.sbt      |  29 +++
 .../src/main/scala/GraphxApp.scala              |  47 ++++
 dev/audit-release/sbt_app_streaming/build.sbt   |  29 +++
 .../src/main/scala/StreamingApp.scala           |  62 ++++++
 dev/create-release/create-release.sh            | 132 +++++++++++
 dev/merge_spark_pr.py                           | 200 +++++++++++++++++
 19 files changed, 1072 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/.gitignore
----------------------------------------------------------------------
diff --git a/dev/audit-release/.gitignore b/dev/audit-release/.gitignore
new file mode 100644
index 0000000..7e057a9
--- /dev/null
+++ b/dev/audit-release/.gitignore
@@ -0,0 +1,2 @@
+project/
+spark_audit*

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/README.md
----------------------------------------------------------------------
diff --git a/dev/audit-release/README.md b/dev/audit-release/README.md
new file mode 100644
index 0000000..2437a98
--- /dev/null
+++ b/dev/audit-release/README.md
@@ -0,0 +1,11 @@
+# Test Application Builds
+This directory includes test applications which are built when auditing releases. You can
+run them locally by setting appropriate environment variables.
+
+```
+$ cd sbt_app_core
+$ SCALA_VERSION=2.10.3 \
+  SPARK_VERSION=1.0.0-SNAPSHOT \
+  SPARK_RELEASE_REPOSITORY=file:///home/patrick/.ivy2/local \
+  sbt run
+```
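
For reference, the audit_release.py script below drives these same apps non-interactively by exporting the variables and shelling out to sbt. A minimal Python 2 sketch of that pattern (the version and repository values here are placeholders, not real release coordinates):

```
import os
import subprocess

# Placeholder values; audit_release.py fills these from its release
# configuration block instead.
os.environ["SCALA_VERSION"] = "2.10.3"
os.environ["SPARK_VERSION"] = "0.9.1"
os.environ["SPARK_RELEASE_REPOSITORY"] = "file:///home/user/.ivy2/local"

os.chdir("sbt_app_core")
# A non-zero exit code means the app failed to resolve, build, or run.
ret = subprocess.call("sbt clean run", shell=True)
print "sbt app exited with %d" % ret
```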

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/audit_release.py
----------------------------------------------------------------------
diff --git a/dev/audit-release/audit_release.py b/dev/audit-release/audit_release.py
new file mode 100755
index 0000000..922cca2
--- /dev/null
+++ b/dev/audit-release/audit_release.py
@@ -0,0 +1,223 @@
+#!/usr/bin/python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Audits binary and maven artifacts for a Spark release.
+# Requires GPG and Maven.
+# usage:
+#   python audit_release.py
+
+import os
+import re
+import shutil
+import subprocess
+import sys
+import time
+import urllib2
+
+## Fill in release details here:
+RELEASE_URL = "http://people.apache.org/~tdas/spark-0.9.1-rc1/"
+RELEASE_KEY = "9E4FE3AF"
+RELEASE_REPOSITORY = "https://repository.apache.org/content/repositories/orgapachespark-1006/"
+RELEASE_VERSION = "0.9.1"
+SCALA_VERSION = "2.10.3"
+SCALA_BINARY_VERSION = "2.10"
+##
+
+LOG_FILE_NAME = "spark_audit_%s" % time.strftime("%h_%m_%Y_%I_%M_%S")
+LOG_FILE = open(LOG_FILE_NAME, 'w')
+WORK_DIR = "/tmp/audit_%s" % int(time.time()) 
+MAVEN_CMD = "mvn"
+GPG_CMD = "gpg"
+
+print "Starting tests, log output in %s. Test results printed below:" % LOG_FILE_NAME
+
+# Track failures
+failures = []
+
+def clean_work_files():
+  print "OK to delete scratch directory '%s'? (y/N): " % WORK_DIR
+  response = raw_input()
+  if response == "y":
+    shutil.rmtree(WORK_DIR) 
+  print "Should I delete the log output file '%s'? (y/N): " % LOG_FILE_NAME
+  response = raw_input()
+  if response == "y":
+    os.unlink(LOG_FILE_NAME)
+
+def run_cmd(cmd, exit_on_failure=True):
+  print >> LOG_FILE, "Running command: %s" % cmd
+  ret = subprocess.call(cmd, shell=True, stdout=LOG_FILE, stderr=LOG_FILE)
+  if ret != 0 and exit_on_failure:
+    print "Command failed: %s" % cmd
+    clean_work_files()
+    sys.exit(-1)
+  return ret
+
+def run_cmd_with_output(cmd):
+  print >> sys.stderr, "Running command: %s" % cmd
+  return subprocess.check_output(cmd, shell=True, stderr=LOG_FILE)
+
+def test(cond, msg):
+  if cond:
+    return passed(msg)
+  failed(msg)
+
+def passed(msg):
+  print "[PASSED] %s" % msg
+
+def failed(msg):
+  failures.append(msg)
+  print "[**FAILED**] %s" % msg
+
+def get_url(url):
+  return urllib2.urlopen(url).read()
+
+original_dir = os.getcwd()
+
+# For each of these modules, we'll test an 'empty' application in sbt and 
+# maven that links against them. This will catch issues with messed up
+# dependencies within those projects.
+modules = ["spark-core", "spark-bagel", "spark-mllib", "spark-streaming", "spark-repl", 
+           "spark-graphx", "spark-streaming-flume", "spark-streaming-kafka", 
+           "spark-streaming-mqtt", "spark-streaming-twitter", "spark-streaming-zeromq"]
+modules = map(lambda m: "%s_%s" % (m, SCALA_BINARY_VERSION), modules)
+
+# Check for directories that might interfere with tests
+local_ivy_spark = "~/.ivy2/local/org.apache.spark"
+cache_ivy_spark = "~/.ivy2/cache/org.apache.spark"
+local_maven_kafka = "~/.m2/repository/org/apache/kafka"
+local_maven_spark = "~/.m2/repository/org/apache/spark"
+def ensure_path_not_present(x):
+  if os.path.exists(os.path.expanduser(x)):
+    print "Please remove %s, it can interfere with testing published artifacts." % x
+    sys.exit(-1)
+map(ensure_path_not_present, [local_ivy_spark, cache_ivy_spark, local_maven_kafka, local_maven_spark])
+
+# SBT build tests 
+os.chdir("blank_sbt_build")
+os.environ["SPARK_VERSION"] = RELEASE_VERSION
+os.environ["SCALA_VERSION"] = SCALA_VERSION
+os.environ["SPARK_RELEASE_REPOSITORY"] = RELEASE_REPOSITORY
+for module in modules:
+  os.environ["SPARK_MODULE"] = module
+  ret = run_cmd("sbt clean update", exit_on_failure=False)
+  test(ret == 0, "sbt build against '%s' module" % module) 
+os.chdir(original_dir)
+
+# SBT application tests
+for app in ["sbt_app_core", "sbt_app_graphx", "sbt_app_streaming"]:
+  os.chdir(app)
+  ret = run_cmd("sbt clean run", exit_on_failure=False)
+  test(ret == 0, "sbt application (%s)" % app)
+  os.chdir(original_dir)
+
+# Maven build tests
+os.chdir("blank_maven_build")
+for module in modules:
+  cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
+      '-Dspark.module="%s" clean compile' % 
+      (MAVEN_CMD, RELEASE_REPOSITORY, RELEASE_VERSION, module))
+  ret = run_cmd(cmd, exit_on_failure=False)
+  test(ret == 0, "maven build against '%s' module" % module)
+os.chdir(original_dir)
+
+os.chdir("maven_app_core")
+mvn_exec_cmd = ('%s --update-snapshots -Dspark.release.repository="%s" -Dspark.version="%s" '
+                '-Dscala.binary.version="%s" clean compile '
+                'exec:java -Dexec.mainClass="SimpleApp"' % 
+               (MAVEN_CMD, RELEASE_REPOSITORY, RELEASE_VERSION, SCALA_BINARY_VERSION))
+ret = run_cmd(mvn_exec_cmd, exit_on_failure=False)
+test(ret == 0, "maven application (core)")
+os.chdir(original_dir)
+
+# Binary artifact tests
+if os.path.exists(WORK_DIR):
+  print "Working directory '%s' already exists" % WORK_DIR
+  sys.exit(-1)
+os.mkdir(WORK_DIR)
+os.chdir(WORK_DIR)
+
+index_page = get_url(RELEASE_URL)
+artifact_regex = re.compile("<a href=\"(.*.tgz)\">")
+artifacts = artifact_regex.findall(index_page)
+
+for artifact in artifacts:
+  print "==== Verifying download integrity for artifact: %s ====" % artifact
+
+  artifact_url = "%s/%s" % (RELEASE_URL, artifact)
+  run_cmd("wget %s" % artifact_url)
+
+  key_file = "%s.asc" % artifact
+  run_cmd("wget %s/%s" % (RELEASE_URL, key_file))
+
+  run_cmd("wget %s%s" % (artifact_url, ".sha"))
+
+  # Verify signature
+  run_cmd("%s --keyserver pgp.mit.edu --recv-key %s" % (GPG_CMD, RELEASE_KEY))
+  run_cmd("%s %s" % (GPG_CMD, key_file))
+  passed("Artifact signature verified.")
+
+  # Verify md5
+  my_md5 = run_cmd_with_output("%s --print-md MD5 %s" % (GPG_CMD, artifact)).strip()
+  release_md5 = get_url("%s.md5" % artifact_url).strip()
+  test(my_md5 == release_md5, "Artifact MD5 verified.")
+
+  # Verify sha
+  my_sha = run_cmd_with_output("%s --print-md SHA512 %s" % (GPG_CMD, artifact)).strip()
+  release_sha = get_url("%s.sha" % artifact_url).strip()
+  test(my_sha == release_sha, "Artifact SHA verified.")
+
+  # Verify Apache required files
+  dir_name = artifact.replace(".tgz", "")
+  run_cmd("tar xvzf %s" % artifact)
+  base_files = os.listdir(dir_name)
+  test("CHANGES.txt" in base_files, "Tarball contains CHANGES.txt file")
+  test("NOTICE" in base_files, "Tarball contains NOTICE file")
+  test("LICENSE" in base_files, "Tarball contains LICENSE file")
+ 
+  os.chdir(WORK_DIR)
+ 
+for artifact in artifacts:
+  print "==== Verifying build and tests for artifact: %s ====" % artifact
+  dir_name = artifact.replace(".tgz", "")
+  os.chdir(os.path.join(WORK_DIR, dir_name))
+
+  os.environ["MAVEN_OPTS"] = "-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"
+  # Verify build
+  print "==> Running build"
+  run_cmd("sbt assembly")
+  passed("sbt build successful")
+  run_cmd("%s package -DskipTests" % MAVEN_CMD)    
+  passed("Maven build successful")
+
+  # Verify tests
+  print "==> Performing unit tests"
+  run_cmd("%s test" % MAVEN_CMD)
+  passed("Tests successful")
+  os.chdir(WORK_DIR)
+
+clean_work_files()
+
+if len(failures) == 0:
+  print "ALL TESTS PASSED"
+else:
+  print "SOME TESTS DID NOT PASS"
+  for f in failures:
+    print f
+
+os.chdir(original_dir)
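
One subtlety in the checksum step above: both sides of the comparison come from gpg --print-md, which emits an uppercase, whitespace-grouped digest prefixed with the file name. The release-side .md5/.sha files are generated the same way by create-release.sh below, so the strings match verbatim. A raw hashlib digest would differ on format alone; here is a sketch of normalizing both representations if one wanted to avoid shelling out to gpg (an illustration, not what the script does):

```
import hashlib
import re

def file_digest(path, algo):
  # Plain lowercase hex digest of the file contents.
  h = hashlib.new(algo)
  with open(path, 'rb') as f:
    for chunk in iter(lambda: f.read(1 << 20), ''):
      h.update(chunk)
  return h.hexdigest()

def normalize_gpg_md(text):
  # Drop the "file.tgz:" prefix that gpg --print-md adds, then strip the
  # grouping whitespace and case, leaving bare lowercase hex.
  return re.sub(r'[^0-9a-f]', '', text.split(':', 1)[-1].lower())

# ok = file_digest("spark-0.9.1.tgz", "sha512") == normalize_gpg_md(sha_text)
```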

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/blank_maven_build/pom.xml
----------------------------------------------------------------------
diff --git a/dev/audit-release/blank_maven_build/pom.xml b/dev/audit-release/blank_maven_build/pom.xml
new file mode 100644
index 0000000..047659e
--- /dev/null
+++ b/dev/audit-release/blank_maven_build/pom.xml
@@ -0,0 +1,47 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project>
+  <groupId>spark.audit</groupId>
+  <artifactId>spark-audit</artifactId>
+  <modelVersion>4.0.0</modelVersion>
+  <name>Spark Release Auditor</name>
+  <packaging>jar</packaging>
+  <version>1.0</version>
+  <repositories>
+    <repository>
+      <id>Spray.cc repository</id>
+      <url>http://repo.spray.cc</url>
+    </repository>
+    <repository>
+      <id>Akka repository</id>
+      <url>http://repo.akka.io/releases</url>
+    </repository>
+    <repository>
+      <id>Spark Staging Repo</id>
+      <url>${spark.release.repository}</url>
+    </repository>
+  </repositories>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.spark</groupId>
+      <artifactId>${spark.module}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+  </dependencies>
+</project>
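
The ${spark.release.repository}, ${spark.module}, and ${spark.version} placeholders in this pom are filled in from -D properties on the mvn command line, which is exactly what the Maven loop in audit_release.py above passes. A single invocation, sketched in Python 2 with example coordinates:

```
import subprocess

# Example coordinates; the audit script iterates over every module.
repo = "https://repository.apache.org/content/repositories/orgapachespark-1006/"
cmd = ('mvn --update-snapshots -Dspark.release.repository="%s" '
       '-Dspark.version="0.9.1" -Dspark.module="spark-core_2.10" '
       'clean compile' % repo)
# Each -D property fills the matching ${...} placeholder in the pom.
subprocess.call(cmd, shell=True)
```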

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/blank_sbt_build/build.sbt
----------------------------------------------------------------------
diff --git a/dev/audit-release/blank_sbt_build/build.sbt b/dev/audit-release/blank_sbt_build/build.sbt
new file mode 100644
index 0000000..1cf5274
--- /dev/null
+++ b/dev/audit-release/blank_sbt_build/build.sbt
@@ -0,0 +1,29 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+name := "Spark Release Auditor"
+
+version := "1.0"
+
+scalaVersion := "2.9.3"
+
+libraryDependencies += "org.apache.spark" % System.getenv.get("SPARK_MODULE") % System.getenv.get("SPARK_VERSION")
+
+resolvers ++= Seq(
+  "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
+  "Akka Repository" at "http://repo.akka.io/releases/",
+  "Spray Repository" at "http://repo.spray.cc/")

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/maven_app_core/input.txt
----------------------------------------------------------------------
diff --git a/dev/audit-release/maven_app_core/input.txt b/dev/audit-release/maven_app_core/input.txt
new file mode 100644
index 0000000..837b6f8
--- /dev/null
+++ b/dev/audit-release/maven_app_core/input.txt
@@ -0,0 +1,8 @@
+a
+b
+c
+d
+a
+b
+c
+d

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/maven_app_core/pom.xml
----------------------------------------------------------------------
diff --git a/dev/audit-release/maven_app_core/pom.xml b/dev/audit-release/maven_app_core/pom.xml
new file mode 100644
index 0000000..0b837c0
--- /dev/null
+++ b/dev/audit-release/maven_app_core/pom.xml
@@ -0,0 +1,56 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+  ~ Licensed to the Apache Software Foundation (ASF) under one or more
+  ~ contributor license agreements.  See the NOTICE file distributed with
+  ~ this work for additional information regarding copyright ownership.
+  ~ The ASF licenses this file to You under the Apache License, Version 2.0
+  ~ (the "License"); you may not use this file except in compliance with
+  ~ the License.  You may obtain a copy of the License at
+  ~
+  ~    http://www.apache.org/licenses/LICENSE-2.0
+  ~
+  ~ Unless required by applicable law or agreed to in writing, software
+  ~ distributed under the License is distributed on an "AS IS" BASIS,
+  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  ~ See the License for the specific language governing permissions and
+  ~ limitations under the License.
+  -->
+
+<project>
+  <groupId>spark.audit</groupId>
+  <artifactId>spark-audit</artifactId>
+  <modelVersion>4.0.0</modelVersion>
+  <name>Simple Project</name>
+  <packaging>jar</packaging>
+  <version>1.0</version>
+  <repositories>
+    <repository>
+      <id>Spray.cc repository</id>
+      <url>http://repo.spray.cc</url>
+    </repository>
+    <repository>
+      <id>Akka repository</id>
+      <url>http://repo.akka.io/releases</url>
+    </repository>
+    <repository>
+      <id>Spark Staging Repo</id>
+      <url>${spark.release.repository}</url>
+    </repository>
+  </repositories>
+  <dependencies>
+    <dependency> <!-- Spark dependency -->
+      <groupId>org.apache.spark</groupId>
+      <artifactId>spark-core_${scala.binary.version}</artifactId>
+      <version>${spark.version}</version>
+    </dependency>
+  </dependencies>
+  <!-- Makes sure we get a fairly recent compiler plugin. -->
+  <build>
+    <plugins>
+      <plugin>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <version>2.3.2</version>
+      </plugin>
+    </plugins>
+  </build>
+</project>

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/maven_app_core/src/main/java/SimpleApp.java
----------------------------------------------------------------------
diff --git a/dev/audit-release/maven_app_core/src/main/java/SimpleApp.java b/dev/audit-release/maven_app_core/src/main/java/SimpleApp.java
new file mode 100644
index 0000000..6b65dda
--- /dev/null
+++ b/dev/audit-release/maven_app_core/src/main/java/SimpleApp.java
@@ -0,0 +1,41 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import org.apache.spark.api.java.*;
+import org.apache.spark.api.java.function.Function;
+
+public class SimpleApp {
+  public static void main(String[] args) {
+    String logFile = "input.txt";
+    JavaSparkContext sc = new JavaSparkContext("local", "Simple App");
+    JavaRDD<String> logData = sc.textFile(logFile).cache();
+
+    long numAs = logData.filter(new Function<String, Boolean>() {
+      public Boolean call(String s) { return s.contains("a"); }
+    }).count();
+
+    long numBs = logData.filter(new Function<String, Boolean>() {
+      public Boolean call(String s) { return s.contains("b"); }
+    }).count();
+
+    if (numAs != 2 || numBs != 2) {
+      System.out.println("Failed to parse log files with Spark");
+      System.exit(-1);
+    }
+    System.out.println("Test succeeded");
+  }
+}
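
For comparison, the same count-and-check expressed in PySpark (a sketch only; the audit suite ships no Python application):

```
import sys
from pyspark import SparkContext

sc = SparkContext("local", "Simple App")
logData = sc.textFile("input.txt").cache()
# Same assertion as SimpleApp.java: 'a' and 'b' each occur on two lines.
numAs = logData.filter(lambda s: "a" in s).count()
numBs = logData.filter(lambda s: "b" in s).count()
if numAs != 2 or numBs != 2:
  print "Failed to parse log files with Spark"
  sys.exit(-1)
print "Test succeeded"
```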

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_core/build.sbt
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_core/build.sbt b/dev/audit-release/sbt_app_core/build.sbt
new file mode 100644
index 0000000..97a8cc3
--- /dev/null
+++ b/dev/audit-release/sbt_app_core/build.sbt
@@ -0,0 +1,29 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+name := "Simple Project"
+
+version := "1.0"
+
+scalaVersion := System.getenv.get("SCALA_VERSION")
+
+libraryDependencies += "org.apache.spark" %% "spark-core" % System.getenv.get("SPARK_VERSION")
+
+resolvers ++= Seq(
+  "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
+  "Akka Repository" at "http://repo.akka.io/releases/",
+  "Spray Repository" at "http://repo.spray.cc/")

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_core/input.txt
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_core/input.txt b/dev/audit-release/sbt_app_core/input.txt
new file mode 100644
index 0000000..837b6f8
--- /dev/null
+++ b/dev/audit-release/sbt_app_core/input.txt
@@ -0,0 +1,8 @@
+a
+b
+c
+d
+a
+b
+c
+d

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala b/dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala
new file mode 100644
index 0000000..53fe432
--- /dev/null
+++ b/dev/audit-release/sbt_app_core/src/main/scala/SparkApp.scala
@@ -0,0 +1,49 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package main.scala
+
+import scala.util.Try
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+
+object SimpleApp {
+  def main(args: Array[String]) {
+    val logFile = "input.txt"
+    val sc = new SparkContext("local", "Simple App")
+    val logData = sc.textFile(logFile, 2).cache()
+    val numAs = logData.filter(line => line.contains("a")).count()
+    val numBs = logData.filter(line => line.contains("b")).count()
+    if (numAs != 2 || numBs != 2) {
+      println("Failed to parse log files with Spark")
+      System.exit(-1)
+    }
+
+    // Regression test for SPARK-1167: Remove metrics-ganglia from default build due to LGPL issue
+    val foundConsole = Try(Class.forName("org.apache.spark.metrics.sink.ConsoleSink")).isSuccess
+    val foundGanglia = Try(Class.forName("org.apache.spark.metrics.sink.GangliaSink")).isSuccess
+    if (!foundConsole) {
+      println("Console sink not loaded via spark-core")
+      System.exit(-1)
+    }
+    if (foundGanglia) {
+      println("Ganglia sink was loaded via spark-core")
+      System.exit(-1)
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_ganglia/build.sbt
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_ganglia/build.sbt b/dev/audit-release/sbt_app_ganglia/build.sbt
new file mode 100644
index 0000000..55db675
--- /dev/null
+++ b/dev/audit-release/sbt_app_ganglia/build.sbt
@@ -0,0 +1,31 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+name := "Ganglia Test"
+
+version := "1.0"
+
+scalaVersion := System.getenv.get("SCALA_VERSION")
+
+libraryDependencies += "org.apache.spark" %% "spark-core" % System.getenv.get("SPARK_VERSION")
+
+libraryDependencies += "org.apache.spark" %% "spark-ganglia-lgpl" % System.getenv.get("SPARK_VERSION")
+
+resolvers ++= Seq(
+  "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
+  "Akka Repository" at "http://repo.akka.io/releases/",
+  "Spray Repository" at "http://repo.spray.cc/")

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_ganglia/src/main/scala/SparkApp.scala
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_ganglia/src/main/scala/SparkApp.scala b/dev/audit-release/sbt_app_ganglia/src/main/scala/SparkApp.scala
new file mode 100644
index 0000000..0be8e64
--- /dev/null
+++ b/dev/audit-release/sbt_app_ganglia/src/main/scala/SparkApp.scala
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package main.scala
+
+import scala.util.Try
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+
+object SimpleApp {
+  def main(args: Array[String]) {
+    // Regression test for SPARK-1167: Remove metrics-ganglia from default build due to LGPL issue
+    val foundConsole = Try(Class.forName("org.apache.spark.metrics.sink.ConsoleSink")).isSuccess
+    val foundGanglia = Try(Class.forName("org.apache.spark.metrics.sink.GangliaSink")).isSuccess
+    if (!foundConsole) {
+      println("Console sink not loaded via spark-core")
+      System.exit(-1)
+    }
+    if (!foundGanglia) {
+      println("Ganglia sink not loaded via spark-ganglia-lgpl")
+      System.exit(-1)
+    }
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_graphx/build.sbt
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_graphx/build.sbt b/dev/audit-release/sbt_app_graphx/build.sbt
new file mode 100644
index 0000000..66f2db3
--- /dev/null
+++ b/dev/audit-release/sbt_app_graphx/build.sbt
@@ -0,0 +1,29 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+name := "Simple Project"
+
+version := "1.0"
+
+scalaVersion := System.getenv.get("SCALA_VERSION")
+
+libraryDependencies += "org.apache.spark" %% "spark-graphx" % System.getenv.get("SPARK_VERSION")
+
+resolvers ++= Seq(
+  "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
+  "Akka Repository" at "http://repo.akka.io/releases/",
+  "Spray Repository" at "http://repo.spray.cc/")

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_graphx/src/main/scala/GraphxApp.scala
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_graphx/src/main/scala/GraphxApp.scala b/dev/audit-release/sbt_app_graphx/src/main/scala/GraphxApp.scala
new file mode 100644
index 0000000..da08e01
--- /dev/null
+++ b/dev/audit-release/sbt_app_graphx/src/main/scala/GraphxApp.scala
@@ -0,0 +1,47 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package main.scala
+
+import org.apache.spark.SparkContext
+import org.apache.spark.SparkContext._
+import org.apache.spark.graphx._
+import org.apache.spark.rdd.RDD
+
+object GraphXApp {
+  def main(args: Array[String]) {
+    val sc = new SparkContext("local", "Simple GraphX App")
+    val users: RDD[(VertexId, (String, String))] =
+      sc.parallelize(Array((3L, ("rxin", "student")), (7L, ("jgonzal", "postdoc")),
+                           (5L, ("franklin", "prof")), (2L, ("istoica", "prof")),
+                           (4L, ("peter", "student"))))
+    val relationships: RDD[Edge[String]] =
+      sc.parallelize(Array(Edge(3L, 7L, "collab"),    Edge(5L, 3L, "advisor"),
+                           Edge(2L, 5L, "colleague"), Edge(5L, 7L, "pi"),
+                           Edge(4L, 0L, "student"),   Edge(5L, 0L, "colleague")))
+    val defaultUser = ("John Doe", "Missing")
+    val graph = Graph(users, relationships, defaultUser)
+    // Notice that there is a user 0 (for which we have no information) connected to users
+    // 4 (peter) and 5 (franklin).
+    val triplets = graph.triplets.map(e => (e.srcAttr._1, e.dstAttr._1)).collect
+    if (!triplets.exists(_ == ("peter", "John Doe"))) {
+      println("Failed to run GraphX")
+      System.exit(-1)
+    }
+    println("Test succeeded")
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_streaming/build.sbt
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_streaming/build.sbt b/dev/audit-release/sbt_app_streaming/build.sbt
new file mode 100644
index 0000000..492e5e7
--- /dev/null
+++ b/dev/audit-release/sbt_app_streaming/build.sbt
@@ -0,0 +1,29 @@
+//
+// Licensed to the Apache Software Foundation (ASF) under one or more
+// contributor license agreements.  See the NOTICE file distributed with
+// this work for additional information regarding copyright ownership.
+// The ASF licenses this file to You under the Apache License, Version 2.0
+// (the "License"); you may not use this file except in compliance with
+// the License.  You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+
+name := "Simple Project"
+
+version := "1.0"
+
+scalaVersion := System.getenv.get("SCALA_VERSION")
+
+libraryDependencies += "org.apache.spark" %% "spark-streaming" % System.getenv.get("SPARK_VERSION")
+
+resolvers ++= Seq(
+  "Spark Release Repository" at System.getenv.get("SPARK_RELEASE_REPOSITORY"),
+  "Akka Repository" at "http://repo.akka.io/releases/",
+  "Spray Repository" at "http://repo.spray.cc/")

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala
----------------------------------------------------------------------
diff --git a/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala b/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala
new file mode 100644
index 0000000..3d0722d
--- /dev/null
+++ b/dev/audit-release/sbt_app_streaming/src/main/scala/StreamingApp.scala
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package main.scala
+
+import scala.collection.mutable.{ListBuffer, Queue}
+
+import org.apache.spark.SparkConf
+import org.apache.spark.rdd.RDD
+import org.apache.spark.streaming.StreamingContext
+import org.apache.spark.streaming._
+
+object SparkStreamingExample {
+
+  def main(args: Array[String]) {
+    val conf = new SparkConf(true)
+      .setMaster("local[2]")
+      .setAppName("Streaming test")
+    val ssc = new StreamingContext(conf, Seconds(1))
+    val seen = ListBuffer[RDD[Int]]()
+
+    val rdd1 = ssc.sparkContext.makeRDD(1 to 100, 10)
+    val rdd2 = ssc.sparkContext.makeRDD(1 to 1000, 10)
+    val rdd3 = ssc.sparkContext.makeRDD(1 to 10000, 10)
+
+    val queue = Queue(rdd1, rdd2, rdd3)
+    val stream = ssc.queueStream(queue)
+
+    stream.foreachRDD(rdd => seen += rdd)
+    ssc.start()
+    Thread.sleep(5000)
+
+    def test(f: => Boolean, failureMsg: String) = {
+      if (!f) {
+        println(failureMsg)
+        System.exit(-1)
+      }
+    }
+
+    val rddCounts = seen.map(rdd => rdd.count()).filter(_ > 0)
+    test(rddCounts.length == 3, "Did not collect three RDD's from stream")
+    test(rddCounts.toSet == Set(100, 1000, 10000), "Did not find expected streams")
+
+    println("Test succeeded")
+
+    ssc.stop()
+  }
+}

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/create-release/create-release.sh
----------------------------------------------------------------------
diff --git a/dev/create-release/create-release.sh b/dev/create-release/create-release.sh
new file mode 100755
index 0000000..d0e27cd
--- /dev/null
+++ b/dev/create-release/create-release.sh
@@ -0,0 +1,132 @@
+#!/usr/bin/env bash
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Quick-and-dirty automation of making maven and binary releases. Not robust at all.
+# Publishes releases to Maven and packages/copies binary release artifacts.
+# Expects to be run in a totally empty directory.
+#
+# Would be nice to add:
+#  - Send output to stderr and have useful logging in stdout
+#  - Have this use sbt rather than the Maven release plug-in
+
+GIT_USERNAME=XXX
+GIT_PASSWORD=XXX
+GPG_PASSPHRASE=XXX
+GIT_BRANCH=branch-0.9
+RELEASE_VERSION=0.9.1
+RC_NAME=rc2
+USER_NAME=pwendell
+
+set -e
+
+GIT_TAG=v$RELEASE_VERSION
+
+# Artifact publishing
+
+git clone https://git-wip-us.apache.org/repos/asf/spark.git -b $GIT_BRANCH
+cd spark
+export MAVEN_OPTS="-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"
+
+mvn -Pyarn release:clean
+
+mvn -DskipTests \
+  -Darguments="-DskipTests=true -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 -Dgpg.passphrase=${GPG_PASSPHRASE}" \
+  -Dusername=$GIT_USERNAME -Dpassword=$GIT_PASSWORD \
+  -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
+  -Pyarn -Pspark-ganglia-lgpl \
+  -Dtag=$GIT_TAG -DautoVersionSubmodules=true \
+  --batch-mode release:prepare
+
+mvn -DskipTests \
+  -Darguments="-DskipTests=true -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 -Dgpg.passphrase=${GPG_PASSPHRASE}" \
+  -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0 \
+  -Pyarn -Pspark-ganglia-lgpl \
+  release:perform
+
+rm -rf spark
+
+# Source and binary tarballs
+git clone https://git-wip-us.apache.org/repos/asf/spark.git
+cd spark
+git checkout --force $GIT_TAG
+release_hash=`git rev-parse HEAD`
+
+rm .gitignore
+rm -rf .git
+cd ..
+
+cp -r spark spark-$RELEASE_VERSION
+tar cvzf spark-$RELEASE_VERSION.tgz spark-$RELEASE_VERSION
+echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --armour --output spark-$RELEASE_VERSION.tgz.asc \
+  --detach-sig spark-$RELEASE_VERSION.tgz
+echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md MD5 spark-$RELEASE_VERSION.tgz > \
+  spark-$RELEASE_VERSION.tgz.md5
+echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md SHA512 spark-$RELEASE_VERSION.tgz > \
+  spark-$RELEASE_VERSION.tgz.sha
+rm -rf spark-$RELEASE_VERSION
+
+make_binary_release() {
+  NAME=$1
+  MAVEN_FLAGS=$2
+
+  cp -r spark spark-$RELEASE_VERSION-bin-$NAME
+  cd spark-$RELEASE_VERSION-bin-$NAME
+  export MAVEN_OPTS="-Xmx3g -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=1g"
+  mvn $MAVEN_FLAGS -DskipTests clean package
+  find . -name test-classes -type d | xargs rm -rf
+  find . -name classes -type d | xargs rm -rf
+  cd ..
+  tar cvzf spark-$RELEASE_VERSION-bin-$NAME.tgz spark-$RELEASE_VERSION-bin-$NAME
+  echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --armour \
+    --output spark-$RELEASE_VERSION-bin-$NAME.tgz.asc \
+    --detach-sig spark-$RELEASE_VERSION-bin-$NAME.tgz
+  echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md \
+    MD5 spark-$RELEASE_VERSION-bin-$NAME.tgz > \
+    spark-$RELEASE_VERSION-bin-$NAME.tgz.md5
+  echo $GPG_PASSPHRASE | gpg --passphrase-fd 0 --print-md \
+    SHA512 spark-$RELEASE_VERSION-bin-$NAME.tgz > \
+    spark-$RELEASE_VERSION-bin-$NAME.tgz.sha
+  rm -rf spark-$RELEASE_VERSION-bin-$NAME
+}
+
+make_binary_release "hadoop1"  "-Dhadoop.version=1.0.4"
+make_binary_release "cdh4"     "-Dhadoop.version=2.0.0-mr1-cdh4.2.0"
+make_binary_release "hadoop2"  "-Pyarn -Dhadoop.version=2.2.0 -Dyarn.version=2.2.0"
+
+# Copy data
+echo "Copying release tarballs"
+ssh $USER_NAME@people.apache.org \
+  mkdir /home/$USER_NAME/public_html/spark-$RELEASE_VERSION-$RC_NAME
+rc_folder=spark-$RELEASE_VERSION-$RC_NAME
+scp spark* \
+  $USER_NAME@people.apache.org:/home/$USER_NAME/public_html/$rc_folder/
+
+# Docs
+cd spark
+cd docs
+PRODUCTION=1 jekyll build
+echo "Copying release documentation"
+rc_docs_folder=${rc_folder}-docs
+rsync -r _site/* $USER_NAME@people.apache.org:/home/$USER_NAME/public_html/$rc_docs_folder
+
+echo "Release $RELEASE_VERSION completed:"
+echo -e "Git tag:\t $GIT_TAG"
+echo -e "Release commit:\t $release_hash"
+echo -e "Binary location:\t http://people.apache.org/~$USER_NAME/$rc_folder"
+echo -e "Doc location:\t http://people.apache.org/~$USER_NAME/$rc_docs_folder"
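
The .asc files written above are detached ASCII-armored signatures, which audit_release.py later fetches and verifies against the tarballs. Verifying one by hand follows the same two gpg steps the audit script runs (a sketch; it assumes the artifact and its .asc are in the current directory):

```
import subprocess

artifact = "spark-0.9.1.tgz"
# Fetch the release signing key (the RELEASE_KEY used by audit_release.py),
# then check the detached signature; gpg exits non-zero on a mismatch.
subprocess.check_call("gpg --keyserver pgp.mit.edu --recv-key 9E4FE3AF", shell=True)
subprocess.check_call("gpg --verify %s.asc %s" % (artifact, artifact), shell=True)
print "Signature OK"
```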

http://git-wip-us.apache.org/repos/asf/spark/blob/7ec78bc5/dev/merge_spark_pr.py
----------------------------------------------------------------------
diff --git a/dev/merge_spark_pr.py b/dev/merge_spark_pr.py
new file mode 100755
index 0000000..e8f78fc
--- /dev/null
+++ b/dev/merge_spark_pr.py
@@ -0,0 +1,200 @@
+#!/usr/bin/python
+
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Utility for creating well-formed pull request merges and pushing them to Apache.
+#   usage: ./merge_spark_pr.py    (see config env vars below)
+#
+# This utility assumes you already have a local Spark git folder and that you
+# have added remotes corresponding to both (i) the GitHub Apache Spark
+# mirror and (ii) the Apache git repo.
+
+import json
+import os
+import subprocess
+import sys
+import tempfile
+import urllib2
+
+# Location of your Spark git development area
+SPARK_HOME = os.environ.get("SPARK_HOME", "/home/patrick/Documents/spark")
+# Remote name which points to the GitHub site
+PR_REMOTE_NAME = os.environ.get("PR_REMOTE_NAME", "apache-github")
+# Remote name which points to Apache git
+PUSH_REMOTE_NAME = os.environ.get("PUSH_REMOTE_NAME", "apache")
+
+GIT_API_BASE = "https://api.github.com/repos/apache/spark"
+# Prefix added to temporary branches
+BRANCH_PREFIX = "PR_TOOL"
+
+os.chdir(SPARK_HOME)
+
+def get_json(url):
+  try:
+    return json.load(urllib2.urlopen(url))
+  except urllib2.HTTPError as e:
+    print "Unable to fetch URL, exiting: %s" % url
+    sys.exit(-1)
+
+def fail(msg):
+  print msg
+  clean_up()
+  sys.exit(-1)
+
+def run_cmd(cmd):
+  if isinstance(cmd, list):
+    return subprocess.check_output(cmd)
+  else:
+    return subprocess.check_output(cmd.split(" "))
+
+def continue_maybe(prompt):
+  result = raw_input("\n%s (y/n): " % prompt)
+  if result.lower() != "y":
+    fail("Okay, exiting")
+
+original_head = run_cmd("git rev-parse HEAD")[:8]
+
+def clean_up():
+  print "Restoring head pointer to %s" % original_head
+  run_cmd("git checkout %s" % original_head)
+
+  branches = run_cmd("git branch").replace(" ", "").split("\n")
+
+  for branch in filter(lambda x: x.startswith(BRANCH_PREFIX), branches):
+    print "Deleting local branch %s" % branch
+    run_cmd("git branch -D %s" % branch)
+
+# merge the requested PR and return the merge hash
+def merge_pr(pr_num, target_ref):
+  pr_branch_name = "%s_MERGE_PR_%s" % (BRANCH_PREFIX, pr_num)
+  target_branch_name = "%s_MERGE_PR_%s_%s" % (BRANCH_PREFIX, pr_num, target_ref.upper())
+  run_cmd("git fetch %s pull/%s/head:%s" % (PR_REMOTE_NAME, pr_num, pr_branch_name))
+  run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, target_ref, target_branch_name))
+  run_cmd("git checkout %s" % target_branch_name)
+  
+  run_cmd(['git', 'merge', pr_branch_name, '--squash'])
+
+  commit_authors = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name, 
+    '--pretty=format:%an <%ae>']).split("\n")
+  distinct_authors = sorted(set(commit_authors), key=lambda x: commit_authors.count(x), reverse=True)
+  primary_author = distinct_authors[0]
+  commits = run_cmd(['git', 'log', 'HEAD..%s' % pr_branch_name, 
+    '--pretty=format:%h [%an] %s']).split("\n\n")
+
+  merge_message_flags = []
+
+  for p in [title, body]:
+    merge_message_flags += ["-m", p]
+
+  authors = "\n".join(["Author: %s" % a for a in distinct_authors])
+
+  merge_message_flags += ["-m", authors]
+
+  # The string "Closes #%s" is required for GitHub to correctly close the PR
+  merge_message_flags += ["-m",
+    "Closes #%s from %s and squashes the following commits:" % (pr_num, pr_repo_desc)]
+  for c in commits:
+    merge_message_flags += ["-m", c]
+
+  run_cmd(['git', 'commit', '--author="%s"' % primary_author] + merge_message_flags)
+
+  continue_maybe("Merge complete (local ref %s). Push to %s?" % (
+    target_branch_name, PUSH_REMOTE_NAME))
+
+  try:
+    run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, target_branch_name, target_ref))
+  except Exception as e:
+    clean_up()
+    fail("Exception while pushing: %s" % e)
+  
+  merge_hash = run_cmd("git rev-parse %s" % target_branch_name)[:8]
+  clean_up()
+  print("Pull request #%s merged!" % pr_num)
+  print("Merge hash: %s" % merge_hash)
+  return merge_hash
+
+
+def maybe_cherry_pick(pr_num, merge_hash, default_branch):
+  continue_maybe("Would you like to pick %s into another branch?" % merge_hash)
+  pick_ref = raw_input("Enter a branch name [%s]: " % default_branch)
+  if pick_ref == "":
+    pick_ref = default_branch
+
+  pick_branch_name = "%s_PICK_PR_%s_%s" % (BRANCH_PREFIX, pr_num, pick_ref.upper())
+
+  run_cmd("git fetch %s %s:%s" % (PUSH_REMOTE_NAME, pick_ref, pick_branch_name))
+  run_cmd("git checkout %s" % pick_branch_name)
+  run_cmd("git cherry-pick -sx %s" % merge_hash)
+  
+  continue_maybe("Pick complete (local ref %s). Push to %s?" % (
+    pick_branch_name, PUSH_REMOTE_NAME))
+
+  try:
+    run_cmd('git push %s %s:%s' % (PUSH_REMOTE_NAME, pick_branch_name, pick_ref))
+  except Exception as e:
+    clean_up()
+    fail("Exception while pushing: %s" % e)
+
+  pick_hash = run_cmd("git rev-parse %s" % pick_branch_name)[:8]
+  clean_up()
+
+  print("Pull request #%s picked into %s!" % (pr_num, pick_ref))
+  print("Pick hash: %s" % pick_hash)
+
+branches = get_json("%s/branches" % GIT_API_BASE)
+branch_names = filter(lambda x: x.startswith("branch-"), [x['name'] for x in branches])
+# Assumes branch names can be sorted lexicographically
+latest_branch = sorted(branch_names, reverse=True)[0]
+
+pr_num = raw_input("Which pull request would you like to merge? (e.g. 34): ")
+pr = get_json("%s/pulls/%s" % (GIT_API_BASE, pr_num))
+
+url = pr["url"]
+title = pr["title"]
+body = pr["body"]
+target_ref = pr["base"]["ref"]
+user_login = pr["user"]["login"]
+base_ref = pr["head"]["ref"]
+pr_repo_desc = "%s/%s" % (user_login, base_ref)
+
+if pr["merged"] == True:
+  print "Pull request %s has already been merged, assuming you want to backport" % pr_num
+  merge_commit_desc = run_cmd(['git', 'log', '--merges', '--first-parent', 
+    '--grep=pull request #%s' % pr_num, '--oneline']).split("\n")[0]
+  if merge_commit_desc == "":
+    fail("Couldn't find any merge commit for #%s, you may need to update HEAD." % pr_num)
+
+  merge_hash = merge_commit_desc[:7]  
+  message = merge_commit_desc[8:]
+  
+  print "Found: %s" % message
+  maybe_cherry_pick(pr_num, merge_hash, latest_branch)
+  sys.exit(0)
+
+if bool(pr["mergeable"]) == False:
+  fail("Pull request %s is not mergeable in its current form" % pr_num)
+
+print ("\n=== Pull Request #%s ===" % pr_num)
+print("title\t%s\nsource\t%s\ntarget\t%s\nurl\t%s" % (
+  title, pr_repo_desc, target_ref, url))
+continue_maybe("Proceed with merging pull request #%s?" % pr_num)
+
+merge_hash = merge_pr(pr_num, target_ref)
+
+while True:
+  maybe_cherry_pick(pr_num, merge_hash, latest_branch)
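
For reference, get_json above is a thin wrapper over the GitHub REST API. Fetching a pull request and reading the fields the script consumes looks like this (PR number 34 is the example from the script's own prompt):

```
import json
import urllib2

pr = json.load(urllib2.urlopen("https://api.github.com/repos/apache/spark/pulls/34"))
# The fields the merge script reads before building the squash commit:
print pr["title"], pr["base"]["ref"]          # title and target branch
print pr["user"]["login"], pr["head"]["ref"]  # source repo owner and branch
print pr["merged"], pr["mergeable"]           # merge-state flags
```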