You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@pig.apache.org by ro...@apache.org on 2016/07/25 19:32:55 UTC
svn commit: r1754043 - in /pig/trunk: CHANGES.txt
src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
Author: rohini
Date: Mon Jul 25 19:32:55 2016
New Revision: 1754043
URL: http://svn.apache.org/viewvc?rev=1754043&view=rev
Log:
PIG-4957: See "Received kill signal" message for a normal run after PIG-4921 (rohini)
Modified:
pig/trunk/CHANGES.txt
pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
Modified: pig/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/pig/trunk/CHANGES.txt?rev=1754043&r1=1754042&r2=1754043&view=diff
==============================================================================
--- pig/trunk/CHANGES.txt (original)
+++ pig/trunk/CHANGES.txt Mon Jul 25 19:32:55 2016
@@ -36,6 +36,8 @@ OPTIMIZATIONS
BUG FIXES
+PIG-4957: See "Received kill signal" message for a normal run after PIG-4921 (rohini)
+
PIG-4953: Predicate push-down will not run filters for single unary expressions (rdblue via daijy)
PIG-4940: Predicate push-down filtering unary expressions can be pushed (rdblue via daijy)
Modified: pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java
URL: http://svn.apache.org/viewvc/pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java?rev=1754043&r1=1754042&r2=1754043&view=diff
==============================================================================
--- pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java (original)
+++ pig/trunk/src/org/apache/pig/backend/hadoop/executionengine/mapReduceLayer/MapReduceLauncher.java Mon Jul 25 19:32:55 2016
@@ -106,8 +106,8 @@ public class MapReduceLauncher extends L
@Override
public void kill() {
try {
- log.info("Received kill signal");
- if (jc!=null) {
+ if (jc != null && jc.getRunningJobs().size() > 0) {
+ log.info("Received kill signal");
for (Job job : jc.getRunningJobs()) {
HadoopShims.killJob(job);
log.info("Job " + job.getAssignedJobID() + " killed");