Posted to commits@asterixdb.apache.org by am...@apache.org on 2016/02/22 23:34:49 UTC

[01/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Repository: incubator-asterixdb
Updated Branches:
  refs/heads/master 825945283 -> ac683db08


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/resources/tests.txt
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/tests.txt b/asterix-external-data/src/test/resources/tests.txt
new file mode 100644
index 0000000..cc82b6e
--- /dev/null
+++ b/asterix-external-data/src/test/resources/tests.txt
@@ -0,0 +1,365 @@
+######################################################################
+#
+# File: tests.txt
+# Purpose: This is a series of tests for the ClassAd library. 
+#          Currently there is no good documentation for how to write
+#          tests; you'll just have to look in test_classads.C
+#
+######################################################################
+
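+# The directives exercised below include (summary inferred from this file;
+# see test_classads.C for the authoritative handling of each directive):
+#   test-lexer-one-token <input> <expected-token-kind>
+#   begin-classad <name> ... end-classad
+#   evaluate <classad-name> <expression>
+#   test-match <match-kind> <left-ad> <right-ad> <expectation>
+#   make-collection / add-to-collection / create-subview / check-in-view
+#   print-classad / print-classad-xml
+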
+######################################################################
+#
+# Test the lexer
+#
+######################################################################
+test-lexer-one-token    123    LEX_INTEGER_VALUE
+test-lexer-one-token 123.456 LEX_REAL_VALUE
+test-lexer-one-token true LEX_BOOLEAN_VALUE
+test-lexer-one-token false LEX_BOOLEAN_VALUE
+test-lexer-one-token "blah" LEX_STRING_VALUE
+test-lexer-one-token "new\nline" LEX_STRING_VALUE
+test-lexer-one-token "blue bibs be big" LEX_STRING_VALUE
+test-lexer-one-token blah LEX_IDENTIFIER
+test-lexer-one-token __ LEX_IDENTIFIER
+test-lexer-one-token undefined LEX_UNDEFINED_VALUE
+test-lexer-one-token error LEX_ERROR_VALUE
+test-lexer-one-token . LEX_SELECTION
+test-lexer-one-token * LEX_MULTIPLY
+test-lexer-one-token / LEX_DIVIDE
+test-lexer-one-token % LEX_MODULUS
+test-lexer-one-token + LEX_PLUS
+test-lexer-one-token - LEX_MINUS
+test-lexer-one-token & LEX_BITWISE_AND
+test-lexer-one-token | LEX_BITWISE_OR
+test-lexer-one-token ~ LEX_BITWISE_NOT
+test-lexer-one-token ^ LEX_BITWISE_XOR
+test-lexer-one-token << LEX_LEFT_SHIFT
+test-lexer-one-token >> LEX_RIGHT_SHIFT
+test-lexer-one-token >>> LEX_URIGHT_SHIFT
+test-lexer-one-token && LEX_LOGICAL_AND
+test-lexer-one-token || LEX_LOGICAL_OR
+test-lexer-one-token ! LEX_LOGICAL_NOT
+test-lexer-one-token < LEX_LESS_THAN
+test-lexer-one-token <= LEX_LESS_OR_EQUAL
+test-lexer-one-token > LEX_GREATER_THAN
+test-lexer-one-token >= LEX_GREATER_OR_EQUAL
+test-lexer-one-token == LEX_EQUAL
+test-lexer-one-token != LEX_NOT_EQUAL
+test-lexer-one-token is LEX_META_EQUAL
+test-lexer-one-token isnt LEX_META_NOT_EQUAL
+test-lexer-one-token = LEX_BOUND_TO
+test-lexer-one-token ? LEX_QMARK
+test-lexer-one-token : LEX_COLON
+test-lexer-one-token ; LEX_SEMICOLON
+test-lexer-one-token , LEX_COMMA
+test-lexer-one-token [ LEX_OPEN_BOX
+test-lexer-one-token ] LEX_CLOSE_BOX
+test-lexer-one-token ( LEX_OPEN_PAREN
+test-lexer-one-token ) LEX_CLOSE_PAREN
+test-lexer-one-token { LEX_OPEN_BRACE
+test-lexer-one-token } LEX_CLOSE_BRACE
+
+# I think this should evaluate to LEX_BACKSLASH. Apparently
+# it's not actually used anywhere, so it's an error instead. 
+test-lexer-one-token \ LEX_TOKEN_ERROR
+
+######################################################################
+#
+# ClassAds
+#
+######################################################################
+begin-classad Job-1
+[
+  Requirements = (other.Type == "Machine" && other.memory >= 4000);
+  Type = "Job";
+  Memoryused = 6000;
+]
+end-classad
+
+begin-classad Machine-1
+[
+  Type = "machine";
+  Requirements = (other.Type == "job" && other.memoryused < 8000);
+  Memory = 5000;
+]
+end-classad
+
+begin-classad Machine-2
+[
+  Type = "Machine";
+  Requirements = other.mytype == "Job";
+  Memory = 3000
+]
+end-classad
+
+begin-classad Machine-3
+[
+  Type = "Machine";
+  Requirements = other.mytype == "Job";
+  Memory = 6000
+]
+end-classad
+
+begin-classad Misc
+[
+  Type = "Misc";
+  Self = [
+           one = "foo";
+		   two = "bar";
+           rank = Other[StrCat(one, two)];
+         ];
+  Other = [
+            one = 1;
+            two = 2;
+            foobar = 15
+          ];
+  Buddha = [
+             which = Self.which;
+           ];
+  ClassAds = { [a = 1], [a = 2], [a = 3] };
+  Set = ClassAds.a;
+  SizeSet = size(Set);
+  SizeZero = size({});
+  SizeOne = size({1});
+  SizeTwo = size({1, 2});
+  Sum = sum(Set);
+  Average = avg(Set);
+  Min = min(Set);
+  Max = max(Set);
+  AllSmall = allcompare("<", Set, 100);
+  AllBig   = allcompare(">", Set, 100); 
+  AnyTwo   = anycompare("==", Set, 2);
+  AllTwo   = allcompare("==", Set, 2);
+  A = 1;
+  B = true;
+  C = !A;
+  D = !B;
+  R1 = regexp(".*\.cs\.uchicago\.edu",  "gargoyle.cs1uchicago.edu");
+  R2 = regexp(".*\.cs\.wisc\.edu",     "beak.cs.wisc.edu");
+  R3 = regexp(".*\.cs\.uchicago\.edu", "beak.cs.wisc.edu");
+  R4 = regexp(".*\.cs\.wisc\.edu",     "gargoyle.cs.uchicago.edu");
+  R5 = regexp(".*\.cs\.wisc\.edu|.*\.cs\.uchicago\.edu",     
+              "gargoyle\.cs\.uchicago.edu");
+  R6 = regexp(".*\.cs\.wisc\.edu|.*\.cs\.uchicago\.edu",     
+              "beak.cs.wisc\.edu");
+
+]
+end-classad
+
+begin-classad Motherboard
+[
+  have_match =    machine_enclosure.machine.Requirements 
+              && job_enclosure.job.Requirements
+              && storage_enclosure.storage.Requirements;
+
+  machine_enclosure = 
+  [ 
+    job = job_enclosure.job;
+	storage = storage_enclosure.storage;
+    machine = [
+                Type="Machine"; 
+                RAM=6000;
+                Requirements = (job.MemoryNeeded <= RAM);
+              ];
+  ];
+
+  job_enclosure = 
+  [
+    machine = machine_enclosure.machine;
+	storage = storage_enclosure.storage;
+    job = [
+            Type="Job";
+            MemoryNeeded = 6000;
+            DiskSpaceNeeded = 8000;
+            Requirements = (machine.RAM >= MemoryNeeded
+                         && storage.FreeDiskSpace > DiskSpaceNeeded)
+          ];
+  ];
+
+  storage_enclosure = 
+  [
+    job = job_enclosure.job;
+    machine = machine_enclosure.machine;
+    storage = [
+         Type = "Storage";
+         FreeDiskSpace = 10000;
+         Requirements = job.DiskSpaceNeeded < FreeDiskSpace;
+         ];
+
+  ];
+]
+end-classad
+
+evaluate Motherboard job_enclosure.job.Requirements
+evaluate Motherboard machine_enclosure.job.Requirements
+evaluate Motherboard storage_enclosure.storage.Requirements
+evaluate Motherboard have_match
+
+######################################################################
+#
+# Basic Evaluation Tests
+#
+######################################################################
+evaluate Job-1 TestTernary
+evaluate Misc Self.rank
+evaluate Machine-1 memory
+evaluate Misc Set
+evaluate Misc Sum
+evaluate Misc Average
+evaluate Misc Min
+evaluate Misc Max
+evaluate Misc AllSmall
+evaluate Misc AllBig
+evaluate Misc AnyTwo
+evaluate Misc AllTwo
+evaluate Misc ClassAds.b
+evaluate Misc ClassAds.c
+evaluate Misc All_Undefined
+evaluate Misc Any_Undefined
+evaluate Misc A
+evaluate Misc B
+evaluate Misc C
+evaluate Misc D
+evaluate Misc SizeSet;
+evaluate Misc SizeZero;
+evaluate Misc SizeOne;
+evaluate Misc SizeTwo;
+evaluate Misc R1;
+evaluate Misc R2;
+evaluate Misc R3;
+evaluate Misc R4;
+evaluate Misc R5;
+evaluate Misc R6;
+
+######################################################################
+#
+# Matching tests
+#
+######################################################################
+test-match symmetricMatch Job-1 Machine-1 ExpectMatch
+test-match leftMatchesRight Job-1 Machine-1 ExpectMatch
+test-match rightMatchesLeft Job-1 Machine-1 ExpectMatch
+test-match symmetricMatch Job-1 Machine-2 ExpectDontMatch
+
+######################################################################
+#
+# Collections
+#
+######################################################################
+#make-collection Machines machines-log
+make-collection Machines
+add-to-collection Machines Machine-1
+add-to-collection Machines Machine-2
+add-to-collection Machines Machine-3
+
+create-subview Machines root Machine-View (other.Memory >= 4000)
+check-in-view Machines Machine-View Machine-1 ExpectIn
+check-in-view Machines Machine-View Machine-2 ExpectNotIn
+check-in-view Machines Machine-View Machine-3 ExpectIn
+
+create-subview Machines Machine-View BigMachine-View (other.Memory > 5000)
+check-in-view Machines BigMachine-View Machine-1 ExpectNotIn
+check-in-view Machines BigMachine-View Machine-2 ExpectNotIn
+check-in-view Machines BigMachine-View Machine-3 ExpectIn
+
+#truncate-log Machines
+
+begin-classad Group-1
+[
+  Type = "Group";
+  Users = {"Alain", "Peter"};
+]
+end-classad
+
+begin-classad Group-2
+[
+  Type = "Group";
+  Users = {"Annalisa", "David"};
+]
+end-classad
+
+#make-collection Groups groups-log
+make-collection Groups groups-log
+add-to-collection Groups Group-1
+add-to-collection Groups Group-2
+
+begin-classad List-Eval
+[ a = { x }; x = 1 ]
+end-classad
+
+evaluate Misc SizeOne;
+evaluate Misc SizeTwo;
+evaluate List-Eval a[0]
+
+begin-classad Lexer-Fault
+[ a=!b; b=true; ]
+end-classad
+evaluate Lexer-Fault b
+evaluate Lexer-Fault a
+
+begin-classad Octal
+[ a = "\101\044\44\1"; /* Should be A(( */ ]
+end-classad
+evaluate Octal a
+
+begin-classad Floats
+[a = 0.7 * 4.5; ]
+end-classad
+
+begin-classad Quoted-Names
+[
+  'a' = 4;
+  'b.##$%' = 5;
+]
+end-classad
+
+begin-classad Times
+[
+  Abs1 = absTime("2003-09-03T");
+  Rel1 = relTime("2+25:14:16.123");
+]
+end-classad
+
+begin-classad Numbers
+[
+  X = 4.3;
+  Y = real("4.3");
+  Not = real("NaN");
+  Infinite = real("INF");
+  Negative_Infinite = real("-INF");
+]
+end-classad
+
+print-classad Floats
+print-classad Quoted-Names
+print-classad Times
+print-classad Numbers
+print-classad-xml Numbers
+
+begin-classad Loop
+[
+  attr = a//b
+]
+end-classad 
+
+#begin-classad Crash
+#[
+#  attr = a<b>
+#]
+#end-classad 
+
+begin-classad policy
+[
+  type = 'machine';
+  access_times = 
+    [
+        globus = [start = 1900; end = 2100;];
+        condor = [start = 100; end = 500;];
+    ]
+]
+end-classad
+
+evaluate policy access_times
+evaluate policy access_times["globus"]
+evaluate policy access_times["globus"].start
+

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql
new file mode 100644
index 0000000..0f40b07
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.1.ddl.aql
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create an adapter that uses external parser to parse data from files
+ * Expected Res : Success
+ * Date         : Feb 09, 2016
+ */
+use dataverse externallibtest;
+
+create type Classad as open {
+  GlobalJobId: int64
+}
+
+create external dataset Condor(Classad) using localfs(
+("path"="asterix_nc1://data/external-parser/jobads.new"),
+("reader"="adm"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
+("reader-stream"="localfs"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.2.query.aql
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.2.query.aql b/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.2.query.aql
new file mode 100644
index 0000000..ca2d832
--- /dev/null
+++ b/asterix-installer/src/test/resources/integrationts/library/queries/library-parsers/record-parser/record-parser.2.query.aql
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create an adapter that uses external parser to parse data from files
+ * Expected Res : Success
+ * Date         : Feb 09, 2016
+ */
+use dataverse externallibtest;
+
+for $x in dataset Condor
+return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
----------------------------------------------------------------------
diff --git a/asterix-installer/src/test/resources/integrationts/library/testsuite.xml b/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
index 9d62602..87a53c8 100644
--- a/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
+++ b/asterix-installer/src/test/resources/integrationts/library/testsuite.xml
@@ -17,6 +17,13 @@
  ! under the License.
  !-->
 <test-suite xmlns="urn:xml.testframework.asterix.apache.org" ResultOffsetPath="results" QueryOffsetPath="queries" QueryFileExtension=".aql">
+  <test-group name="library-parsers">
+    <test-case FilePath="library-parsers">
+      <compilation-unit name="record-parser">
+        <output-dir compare="Text">record-parser</output-dir>
+      </compilation-unit>
+    </test-case>
+  </test-group>
   <test-group name="library-functions">
     <test-case FilePath="library-functions">
       <compilation-unit name="mysum">

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
index f3523da..48d93f5 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/declared/AqlMetadataProvider.java
@@ -66,6 +66,7 @@ import org.apache.asterix.external.operators.ExternalRTreeSearchOperatorDescript
 import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
 import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
 import org.apache.asterix.external.provider.AdapterFactoryProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.FeedConstants;
 import org.apache.asterix.formats.base.IDataFormat;
 import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
@@ -545,6 +546,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
             Map<String, String> configuration, IAType itemType, boolean isPKAutoGenerated,
             List<List<String>> primaryKeys) throws AlgebricksException {
         try {
+            configuration.put(ExternalDataConstants.KEY_DATAVERSE, dataset.getDataverseName());
             IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(adapterName, configuration,
                     (ARecordType) itemType);
 
@@ -568,7 +570,7 @@ public class AqlMetadataProvider implements IMetadataProvider<AqlSourceId, Strin
 
             return adapterFactory;
         } catch (Exception e) {
-            throw new AlgebricksException("Unable to create adapter " + e);
+            throw new AlgebricksException("Unable to create adapter", e);
         }
     }
 

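The last hunk above switches from concatenating the caught exception into the message to passing it as the cause, which preserves the original stack trace when the wrapping AlgebricksException is logged. A minimal illustration of the difference, using plain JDK types rather than the AsterixDB classes:

    public class WrapExample {
        static void createAdapter() {
            // Stand-in for the adapter factory call that may fail.
            throw new IllegalStateException("bad adapter configuration");
        }

        public static void main(String[] args) {
            try {
                createAdapter();
            } catch (Exception e) {
                // "Unable to create adapter " + e flattens the cause to a string and
                // drops its stack trace; passing e as the second argument keeps the
                // full chain visible to whoever catches or logs the wrapper.
                throw new RuntimeException("Unable to create adapter", e);
            }
        }
    }
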
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
----------------------------------------------------------------------
diff --git a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
index 7ef51cb..1f815c0 100644
--- a/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
+++ b/asterix-metadata/src/main/java/org/apache/asterix/metadata/feeds/FeedMetadataUtil.java
@@ -31,7 +31,6 @@ import java.util.logging.Logger;
 
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.MetadataConstants;
-import org.apache.asterix.common.dataflow.AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.dataflow.AsterixLSMTreeInsertDeleteOperatorDescriptor;
 import org.apache.asterix.common.exceptions.AsterixException;
 import org.apache.asterix.common.functions.FunctionSignature;
@@ -46,6 +45,7 @@ import org.apache.asterix.external.library.ExternalLibraryManager;
 import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
 import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
 import org.apache.asterix.external.provider.AdapterFactoryProvider;
+import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
 import org.apache.asterix.metadata.MetadataException;
@@ -158,17 +158,13 @@ public class FeedMetadataUtil {
                         orig.getFeedConnectionId(), orig.getSourceFeedId(), (ARecordType) orig.getOutputType(),
                         orig.getRecordDescriptor(), orig.getFeedPolicyProperties(), orig.getSubscriptionLocation());
                 oldNewOID.put(opDesc.getOperatorId(), fiop.getOperatorId());
-            } else if (opDesc instanceof AsterixLSMTreeInsertDeleteOperatorDescriptor) {
+            } else if (opDesc instanceof AsterixLSMTreeInsertDeleteOperatorDescriptor
+                    && ((AsterixLSMTreeInsertDeleteOperatorDescriptor) opDesc).isPrimary()) {
+                // only introduce a STORE meta-operator before the primary index
                 operandId = ((AsterixLSMTreeInsertDeleteOperatorDescriptor) opDesc).getIndexName();
                 metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
                         FeedRuntimeType.STORE, false, operandId);
                 oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
-            } else if (opDesc instanceof AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor) {
-                operandId = ((AsterixLSMInvertedIndexInsertDeleteOperatorDescriptor) opDesc).getIndexName();
-                metaOp = new FeedMetaOperatorDescriptor(altered, feedConnectionId, opDesc, feedPolicyProperties,
-                        FeedRuntimeType.STORE, false, operandId);
-                oldNewOID.put(opDesc.getOperatorId(), metaOp.getOperatorId());
-
             } else {
                 FeedRuntimeType runtimeType = null;
                 boolean enableSubscriptionMode = false;
@@ -487,6 +483,7 @@ public class FeedMetadataUtil {
                 adapterEntity = MetadataManager.INSTANCE.getAdapter(mdTxnCtx, feed.getDataverseName(), adapterName);
             }
 
+            ExternalDataCompatibilityUtils.addCompatabilityParameters(adapterName, adapterOutputType, configuration);
             if (adapterEntity != null) {
                 adapterType = adapterEntity.getType();
                 adapterFactoryClassname = adapterEntity.getClassname();
@@ -511,8 +508,7 @@ public class FeedMetadataUtil {
             feedProps = new Triple<IAdapterFactory, ARecordType, IDataSourceAdapter.AdapterType>(adapterFactory,
                     adapterOutputType, adapterType);
         } catch (Exception e) {
-            e.printStackTrace();
-            throw new AlgebricksException("unable to create adapter " + e);
+            throw new AlgebricksException("unable to create adapter", e);
         }
         return feedProps;
     }


[26/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJobNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJobNotificationHandler.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJobNotificationHandler.java
new file mode 100644
index 0000000..d729680
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJobNotificationHandler.java
@@ -0,0 +1,742 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.Date;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.commons.lang3.StringUtils;
+import org.apache.asterix.app.external.FeedLifecycleListener.Message;
+import org.apache.asterix.app.external.FeedWorkCollection.SubscribeFeedWork;
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.api.IFeedJoint.State;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+import org.apache.asterix.external.feed.management.FeedWorkManager;
+import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.JobType;
+import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
+import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
+import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
+import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobInfo;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.api.job.JobStatus;
+import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
+
+public class FeedJobNotificationHandler implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
+
+    private final LinkedBlockingQueue<Message> inbox;
+    private final Map<FeedConnectionId, List<IFeedLifecycleEventSubscriber>> eventSubscribers;
+
+    private final Map<JobId, FeedJobInfo> jobInfos;
+    private final Map<FeedId, FeedIntakeInfo> intakeJobInfos;
+    private final Map<FeedConnectionId, FeedConnectJobInfo> connectJobInfos;
+    private final Map<FeedId, List<IFeedJoint>> feedPipeline;
+    private final Map<FeedConnectionId, Pair<IIntakeProgressTracker, Long>> feedIntakeProgressTrackers;
+
+    public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
+        this.inbox = inbox;
+        this.jobInfos = new HashMap<JobId, FeedJobInfo>();
+        this.intakeJobInfos = new HashMap<FeedId, FeedIntakeInfo>();
+        this.connectJobInfos = new HashMap<FeedConnectionId, FeedConnectJobInfo>();
+        this.feedPipeline = new HashMap<FeedId, List<IFeedJoint>>();
+        this.eventSubscribers = new HashMap<FeedConnectionId, List<IFeedLifecycleEventSubscriber>>();
+        this.feedIntakeProgressTrackers = new HashMap<FeedConnectionId, Pair<IIntakeProgressTracker, Long>>();
+    }
+
+    @Override
+    public void run() {
+        Message mesg;
+        while (true) {
+            try {
+                mesg = inbox.take();
+                switch (mesg.messageKind) {
+                    case JOB_START:
+                        handleJobStartMessage(mesg);
+                        break;
+                    case JOB_FINISH:
+                        handleJobFinishMessage(mesg);
+                        break;
+                }
+            } catch (Exception e) {
+                e.printStackTrace();
+            }
+
+        }
+    }
+
+    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
+            IIntakeProgressTracker feedIntakeProgressTracker) {
+        if (feedIntakeProgressTrackers.get(connectionId) == null) {
+            this.feedIntakeProgressTrackers.put(connectionId, new Pair<IIntakeProgressTracker, Long>(
+                    feedIntakeProgressTracker, 0L));
+        } else {
+            throw new IllegalStateException(" Progress tracker for connection " + connectionId
+                    + " is alreader registered");
+        }
+    }
+
+    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
+        this.feedIntakeProgressTrackers.remove(connectionId);
+    }
+
+    public void updateTrackingInformation(StorageReportFeedMessage srm) {
+        Pair<IIntakeProgressTracker, Long> p = feedIntakeProgressTrackers.get(srm.getConnectionId());
+        if (p != null && p.second < srm.getLastPersistedTupleIntakeTimestamp()) {
+            p.second = srm.getLastPersistedTupleIntakeTimestamp();
+            p.first.notifyIngestedTupleTimestamp(p.second);
+        }
+    }
+
+    public Collection<FeedIntakeInfo> getFeedIntakeInfos() {
+        return intakeJobInfos.values();
+    }
+
+    public Collection<FeedConnectJobInfo> getFeedConnectInfos() {
+        return connectJobInfos.values();
+    }
+
+    public void registerFeedJoint(IFeedJoint feedJoint) {
+        List<IFeedJoint> feedJointsOnPipeline = feedPipeline.get(feedJoint.getOwnerFeedId());
+        if (feedJointsOnPipeline == null) {
+            feedJointsOnPipeline = new ArrayList<IFeedJoint>();
+            feedPipeline.put(feedJoint.getOwnerFeedId(), feedJointsOnPipeline);
+            feedJointsOnPipeline.add(feedJoint);
+        } else {
+            if (!feedJointsOnPipeline.contains(feedJoint)) {
+                feedJointsOnPipeline.add(feedJoint);
+            } else {
+                throw new IllegalArgumentException("Feed joint " + feedJoint + " already registered");
+            }
+        }
+    }
+
+    public void registerFeedIntakeJob(FeedId feedId, JobId jobId, JobSpecification jobSpec) throws HyracksDataException {
+        if (jobInfos.get(jobId) != null) {
+            throw new IllegalStateException("Feed job already registered");
+        }
+
+        List<IFeedJoint> joints = feedPipeline.get(feedId);
+        IFeedJoint intakeJoint = null;
+        for (IFeedJoint joint : joints) {
+            if (joint.getType().equals(IFeedJoint.FeedJointType.INTAKE)) {
+                intakeJoint = joint;
+                break;
+            }
+        }
+
+        if (intakeJoint != null) {
+            FeedIntakeInfo intakeJobInfo = new FeedIntakeInfo(jobId, FeedJobState.CREATED, FeedJobInfo.JobType.INTAKE,
+                    feedId, intakeJoint, jobSpec);
+            intakeJobInfos.put(feedId, intakeJobInfo);
+            jobInfos.put(jobId, intakeJobInfo);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered feed intake [" + jobId + "]" + " for feed " + feedId);
+            }
+        } else {
+            throw new HyracksDataException("Could not register feed intake job [" + jobId + "]" + " for feed  "
+                    + feedId);
+        }
+    }
+
+    public void registerFeedCollectionJob(FeedId sourceFeedId, FeedConnectionId connectionId, JobId jobId,
+            JobSpecification jobSpec, Map<String, String> feedPolicy) {
+        if (jobInfos.get(jobId) != null) {
+            throw new IllegalStateException("Feed job already registered");
+        }
+
+        List<IFeedJoint> feedJoints = feedPipeline.get(sourceFeedId);
+        FeedConnectionId cid = null;
+        IFeedJoint sourceFeedJoint = null;
+        for (IFeedJoint joint : feedJoints) {
+            cid = joint.getReceiver(connectionId);
+            if (cid != null) {
+                sourceFeedJoint = joint;
+                break;
+            }
+        }
+
+        if (cid != null) {
+            FeedConnectJobInfo cInfo = new FeedConnectJobInfo(jobId, FeedJobState.CREATED, connectionId,
+                    sourceFeedJoint, null, jobSpec, feedPolicy);
+            jobInfos.put(jobId, cInfo);
+            connectJobInfos.put(connectionId, cInfo);
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Registered feed connection [" + jobId + "]" + " for feed " + connectionId);
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Could not register feed collection job [" + jobId + "]" + " for feed connection "
+                        + connectionId);
+            }
+        }
+
+    }
+
+    public void deregisterFeedIntakeJob(JobId jobId) {
+        if (jobInfos.get(jobId) == null) {
+            throw new IllegalStateException(" Feed Intake job not registered ");
+        }
+
+        FeedIntakeInfo info = (FeedIntakeInfo) jobInfos.get(jobId);
+        jobInfos.remove(jobId);
+        intakeJobInfos.remove(info.getFeedId());
+
+        if (!info.getState().equals(FeedJobState.UNDER_RECOVERY)) {
+            List<IFeedJoint> joints = feedPipeline.get(info.getFeedId());
+            joints.remove(info.getIntakeFeedJoint());
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Deregistered feed intake job [" + jobId + "]");
+            }
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Not removing feed joint as intake job is in " + FeedJobState.UNDER_RECOVERY + " state.");
+            }
+        }
+
+    }
+
+    private void handleJobStartMessage(Message message) throws Exception {
+        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
+        switch (jobInfo.getJobType()) {
+            case INTAKE:
+                handleIntakeJobStartMessage((FeedIntakeInfo) jobInfo);
+                break;
+            case FEED_CONNECT:
+                handleCollectJobStartMessage((FeedConnectJobInfo) jobInfo);
+                break;
+        }
+
+    }
+
+    private void handleJobFinishMessage(Message message) throws Exception {
+        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
+        switch (jobInfo.getJobType()) {
+            case INTAKE:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Intake Job finished for feed intake " + jobInfo.getJobId());
+                }
+                handleFeedIntakeJobFinishMessage((FeedIntakeInfo) jobInfo, message);
+                break;
+            case FEED_CONNECT:
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Collect Job finished for  " + (FeedConnectJobInfo) jobInfo);
+                }
+                handleFeedCollectJobFinishMessage((FeedConnectJobInfo) jobInfo);
+                break;
+        }
+
+    }
+
+    private synchronized void handleIntakeJobStartMessage(FeedIntakeInfo intakeJobInfo) throws Exception {
+        List<OperatorDescriptorId> intakeOperatorIds = new ArrayList<OperatorDescriptorId>();
+        Map<OperatorDescriptorId, IOperatorDescriptor> operators = intakeJobInfo.getSpec().getOperatorMap();
+        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
+            IOperatorDescriptor opDesc = entry.getValue();
+            if (opDesc instanceof FeedIntakeOperatorDescriptor) {
+                intakeOperatorIds.add(opDesc.getOperatorId());
+            }
+        }
+
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(intakeJobInfo.getJobId());
+        List<String> intakeLocations = new ArrayList<String>();
+        for (OperatorDescriptorId intakeOperatorId : intakeOperatorIds) {
+            Map<Integer, String> operatorLocations = info.getOperatorLocations().get(intakeOperatorId);
+            int nOperatorInstances = operatorLocations.size();
+            for (int i = 0; i < nOperatorInstances; i++) {
+                intakeLocations.add(operatorLocations.get(i));
+            }
+        }
+        // intakeLocations is an ordered list; element at position i corresponds to location of i'th instance of operator
+        intakeJobInfo.setIntakeLocation(intakeLocations);
+        intakeJobInfo.getIntakeFeedJoint().setState(State.ACTIVE);
+        intakeJobInfo.setState(FeedJobState.ACTIVE);
+
+        // notify event listeners
+        notifyFeedEventSubscribers(intakeJobInfo, FeedLifecycleEvent.FEED_INTAKE_STARTED);
+    }
+
+    private void handleCollectJobStartMessage(FeedConnectJobInfo cInfo) throws RemoteException, ACIDException {
+        // set locations of feed sub-operations (intake, compute, store)
+        setLocations(cInfo);
+
+        // activate joints
+        List<IFeedJoint> joints = feedPipeline.get(cInfo.getConnectionId().getFeedId());
+        for (IFeedJoint joint : joints) {
+            if (joint.getProvider().equals(cInfo.getConnectionId())) {
+                joint.setState(State.ACTIVE);
+                if (joint.getType().equals(IFeedJoint.FeedJointType.COMPUTE)) {
+                    cInfo.setComputeFeedJoint(joint);
+                }
+            }
+        }
+        cInfo.setState(FeedJobState.ACTIVE);
+
+        // register activity in metadata
+        registerFeedActivity(cInfo);
+        // notify event listeners
+        notifyFeedEventSubscribers(cInfo, FeedLifecycleEvent.FEED_COLLECT_STARTED);
+    }
+
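+    // Determine which connections are affected by the event (every connection on the feed for
+    // intake jobs, the job's own connection otherwise) and forward it to their subscribers.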
+    private void notifyFeedEventSubscribers(FeedJobInfo jobInfo, FeedLifecycleEvent event) {
+        JobType jobType = jobInfo.getJobType();
+        List<FeedConnectionId> impactedConnections = new ArrayList<FeedConnectionId>();
+        if (jobType.equals(JobType.INTAKE)) {
+            FeedId feedId = ((FeedIntakeInfo) jobInfo).getFeedId();
+            for (FeedConnectionId connId : eventSubscribers.keySet()) {
+                if (connId.getFeedId().equals(feedId)) {
+                    impactedConnections.add(connId);
+                }
+            }
+        } else {
+            impactedConnections.add(((FeedConnectJobInfo) jobInfo).getConnectionId());
+        }
+
+        for (FeedConnectionId connId : impactedConnections) {
+            List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connId);
+            if (subscribers != null && !subscribers.isEmpty()) {
+                for (IFeedLifecycleEventSubscriber subscriber : subscribers) {
+                    subscriber.handleFeedEvent(event);
+                }
+            }
+        }
+    }
+
+    public synchronized void submitFeedConnectionRequest(IFeedJoint feedJoint, final FeedConnectionRequest request)
+            throws Exception {
+        List<String> locations = null;
+        switch (feedJoint.getType()) {
+            case INTAKE:
+                FeedIntakeInfo intakeInfo = intakeJobInfos.get(feedJoint.getOwnerFeedId());
+                locations = intakeInfo.getIntakeLocation();
+                break;
+            case COMPUTE:
+                FeedConnectionId connectionId = feedJoint.getProvider();
+                FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+                locations = cInfo.getComputeLocations();
+                break;
+        }
+
+        SubscribeFeedWork work = new SubscribeFeedWork(locations.toArray(new String[] {}), request);
+        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
+    }
+
+    public IFeedJoint getSourceFeedJoint(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        if (cInfo != null) {
+            return cInfo.getSourceFeedJoint();
+        }
+        return null;
+    }
+
+    public Set<FeedConnectionId> getActiveFeedConnections() {
+        Set<FeedConnectionId> activeConnections = new HashSet<FeedConnectionId>();
+        for (FeedConnectJobInfo cInfo : connectJobInfos.values()) {
+            if (cInfo.getState().equals(FeedJobState.ACTIVE)) {
+                activeConnections.add(cInfo.getConnectionId());
+            }
+        }
+        return activeConnections;
+    }
+
+    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        if (cInfo != null) {
+            return cInfo.getState().equals(FeedJobState.ACTIVE);
+        }
+        return false;
+    }
+
+    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
+        FeedConnectJobInfo connectJobInfo = connectJobInfos.get(connectionId);
+        connectJobInfo.setState(jobState);
+    }
+
+    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getState();
+    }
+
+    private void handleFeedIntakeJobFinishMessage(FeedIntakeInfo intakeInfo, Message message) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(message.jobId);
+        JobStatus status = info.getStatus();
+        FeedLifecycleEvent event;
+        event = status.equals(JobStatus.FAILURE) ? FeedLifecycleEvent.FEED_INTAKE_FAILURE
+                : FeedLifecycleEvent.FEED_ENDED;
+
+        // remove feed joints
+        deregisterFeedIntakeJob(message.jobId);
+
+        // notify event listeners
+        notifyFeedEventSubscribers(intakeInfo, event);
+
+    }
+
+    private void handleFeedCollectJobFinishMessage(FeedConnectJobInfo cInfo) throws Exception {
+        FeedConnectionId connectionId = cInfo.getConnectionId();
+
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobInfo info = hcc.getJobInfo(cInfo.getJobId());
+        JobStatus status = info.getStatus();
+        boolean failure = status != null && status.equals(JobStatus.FAILURE);
+        FeedPolicyAccessor fpa = new FeedPolicyAccessor(cInfo.getFeedPolicy());
+
+        boolean removeJobHistory = !failure;
+        boolean retainSubscription = cInfo.getState().equals(FeedJobState.UNDER_RECOVERY)
+                || (failure && fpa.continueOnHardwareFailure());
+
+        if (!retainSubscription) {
+            IFeedJoint feedJoint = cInfo.getSourceFeedJoint();
+            feedJoint.removeReceiver(connectionId);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Subscription " + cInfo.getConnectionId() + " completed successfully. Removed subscription");
+            }
+            removeFeedJointsPostPipelineTermination(cInfo.getConnectionId());
+        }
+
+        if (removeJobHistory) {
+            connectJobInfos.remove(connectionId);
+            jobInfos.remove(cInfo.getJobId());
+            feedIntakeProgressTrackers.remove(cInfo.getConnectionId());
+        }
+        deregisterFeedActivity(cInfo);
+
+        // notify event listeners
+        FeedLifecycleEvent event = failure ? FeedLifecycleEvent.FEED_COLLECT_FAILURE : FeedLifecycleEvent.FEED_ENDED;
+        notifyFeedEventSubscribers(cInfo, event);
+    }
+
+    private void registerFeedActivity(FeedConnectJobInfo cInfo) {
+        Map<String, String> feedActivityDetails = new HashMap<String, String>();
+
+        if (cInfo.getCollectLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS,
+                    StringUtils.join(cInfo.getCollectLocations().iterator(), ','));
+        }
+
+        if (cInfo.getComputeLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS,
+                    StringUtils.join(cInfo.getComputeLocations().iterator(), ','));
+        }
+
+        if (cInfo.getStorageLocations() != null) {
+            feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS,
+                    StringUtils.join(cInfo.getStorageLocations().iterator(), ','));
+        }
+
+        String policyName = cInfo.getFeedPolicy().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
+        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
+
+        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, (new Date()).toString());
+        try {
+            FeedActivity feedActivity = new FeedActivity(cInfo.getConnectionId().getFeedId().getDataverse(), cInfo
+                    .getConnectionId().getFeedId().getFeedName(), cInfo.getConnectionId().getDatasetName(),
+                    feedActivityDetails);
+            CentralFeedManager.getInstance().getFeedLoadManager()
+                    .reportFeedActivity(cInfo.getConnectionId(), feedActivity);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to register feed activity for " + cInfo + " " + e.getMessage());
+            }
+
+        }
+
+    }
+
+    public void deregisterFeedActivity(FeedConnectJobInfo cInfo) {
+        try {
+            CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(cInfo.getConnectionId());
+        } catch (Exception e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to deregister feed activity for " + cInfo + " " + e.getMessage());
+            }
+        }
+    }
+
+    public void removeFeedJointsPostPipelineTermination(FeedConnectionId connectionId) {
+        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
+        List<IFeedJoint> feedJoints = feedPipeline.get(connectionId.getFeedId());
+
+        IFeedJoint sourceJoint = cInfo.getSourceFeedJoint();
+        List<FeedConnectionId> all = sourceJoint.getReceivers();
+        boolean removeSourceJoint = all.size() < 2;
+        if (removeSourceJoint) {
+            feedJoints.remove(sourceJoint);
+        }
+
+        IFeedJoint computeJoint = cInfo.getComputeFeedJoint();
+        if (computeJoint != null && computeJoint.getReceivers().size() < 2) {
+            feedJoints.remove(computeJoint);
+        }
+    }
+
+    public boolean isRegisteredFeedJob(JobId jobId) {
+        return jobInfos.get(jobId) != null;
+    }
+
+    public List<String> getFeedComputeLocations(FeedId feedId) {
+        List<IFeedJoint> feedJoints = feedPipeline.get(feedId);
+        for (IFeedJoint joint : feedJoints) {
+            if (joint.getFeedJointKey().getFeedId().equals(feedId)) {
+                return connectJobInfos.get(joint.getProvider()).getComputeLocations();
+            }
+        }
+        return null;
+    }
+
+    public List<String> getFeedStorageLocations(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getStorageLocations();
+    }
+
+    public List<String> getFeedCollectLocations(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getCollectLocations();
+    }
+
+    public List<String> getFeedIntakeLocations(FeedId feedId) {
+        return intakeJobInfos.get(feedId).getIntakeLocation();
+    }
+
+    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getJobId();
+    }
+
+    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
+        if (subscribers == null) {
+            subscribers = new ArrayList<IFeedLifecycleEventSubscriber>();
+            eventSubscribers.put(connectionId, subscribers);
+        }
+        subscribers.add(subscriber);
+    }
+
+    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
+        if (subscribers != null) {
+            subscribers.remove(subscriber);
+        }
+    }
+
+    //============================
+
+    public boolean isFeedPointAvailable(FeedJointKey feedJointKey) {
+        List<IFeedJoint> joints = feedPipeline.get(feedJointKey.getFeedId());
+        if (joints != null && !joints.isEmpty()) {
+            for (IFeedJoint joint : joints) {
+                if (joint.getFeedJointKey().equals(feedJointKey)) {
+                    return true;
+                }
+            }
+        }
+        return false;
+    }
+
+    public Collection<IFeedJoint> getFeedIntakeJoints() {
+        List<IFeedJoint> intakeFeedPoints = new ArrayList<IFeedJoint>();
+        for (FeedIntakeInfo info : intakeJobInfos.values()) {
+            intakeFeedPoints.add(info.getIntakeFeedJoint());
+        }
+        return intakeFeedPoints;
+    }
+
+    public IFeedJoint getFeedJoint(FeedJointKey feedPointKey) {
+        List<IFeedJoint> joints = feedPipeline.get(feedPointKey.getFeedId());
+        if (joints != null && !joints.isEmpty()) {
+            for (IFeedJoint joint : joints) {
+                if (joint.getFeedJointKey().equals(feedPointKey)) {
+                    return joint;
+                }
+            }
+        }
+        return null;
+    }
+
+    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
+        IFeedJoint feedJoint = getFeedJoint(feedJointKey);
+        if (feedJoint != null) {
+            return feedJoint;
+        } else {
+            String jointKeyString = feedJointKey.getStringRep();
+            List<IFeedJoint> jointsOnPipeline = feedPipeline.get(feedJointKey.getFeedId());
+            IFeedJoint candidateJoint = null;
+            if (jointsOnPipeline != null) {
+                for (IFeedJoint joint : jointsOnPipeline) {
+                    if (jointKeyString.contains(joint.getFeedJointKey().getStringRep())) {
+                        if (candidateJoint == null) {
+                            candidateJoint = joint;
+                        } else if (joint.getFeedJointKey().getStringRep()
+                                .contains(candidateJoint.getFeedJointKey().getStringRep())) { // the found feed point is a superset of the earlier candidate
+                            candidateJoint = joint;
+                        }
+                    }
+                }
+            }
+            return candidateJoint;
+        }
+    }
+
+    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId).getSpec();
+    }
+
+    public IFeedJoint getFeedPoint(FeedId sourceFeedId, IFeedJoint.FeedJointType type) {
+        List<IFeedJoint> joints = feedPipeline.get(sourceFeedId);
+        for (IFeedJoint joint : joints) {
+            if (joint.getType().equals(type)) {
+                return joint;
+            }
+        }
+        return null;
+    }
+
+    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
+        return connectJobInfos.get(connectionId);
+    }
+
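+    // Classify the job's operators into collect, compute, and store groups, look up where each
+    // operator instance ran via the Hyracks job info, and record those locations on cInfo.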
+    private void setLocations(FeedConnectJobInfo cInfo) {
+        JobSpecification jobSpec = cInfo.getSpec();
+
+        List<OperatorDescriptorId> collectOperatorIds = new ArrayList<OperatorDescriptorId>();
+        List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
+        List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
+
+        Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
+        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
+            IOperatorDescriptor opDesc = entry.getValue();
+            IOperatorDescriptor actualOp = null;
+            if (opDesc instanceof FeedMetaOperatorDescriptor) {
+                actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
+            } else {
+                actualOp = opDesc;
+            }
+
+            if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
+                AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
+                IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
+                boolean computeOp = false;
+                for (IPushRuntimeFactory rf : runtimeFactories) {
+                    if (rf instanceof AssignRuntimeFactory) {
+                        IConnectorDescriptor connDesc = jobSpec.getOperatorInputMap().get(op.getOperatorId()).get(0);
+                        IOperatorDescriptor sourceOp = jobSpec.getConnectorOperatorMap().get(connDesc.getConnectorId())
+                                .getLeft().getLeft();
+                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
+                            computeOp = true;
+                            break;
+                        }
+                    }
+                }
+                if (computeOp) {
+                    computeOperatorIds.add(entry.getKey());
+                }
+            } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
+                storageOperatorIds.add(entry.getKey());
+            } else if (actualOp instanceof FeedCollectOperatorDescriptor) {
+                collectOperatorIds.add(entry.getKey());
+            }
+        }
+
+        try {
+            IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+            JobInfo info = hcc.getJobInfo(cInfo.getJobId());
+            List<String> collectLocations = new ArrayList<String>();
+            for (OperatorDescriptorId collectOpId : collectOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(collectOpId);
+                int nOperatorInstances = operatorLocations.size();
+                for (int i = 0; i < nOperatorInstances; i++) {
+                    collectLocations.add(operatorLocations.get(i));
+                }
+            }
+
+            List<String> computeLocations = new ArrayList<String>();
+            for (OperatorDescriptorId computeOpId : computeOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
+                if (operatorLocations != null) {
+                    int nOperatorInstances = operatorLocations.size();
+                    for (int i = 0; i < nOperatorInstances; i++) {
+                        computeLocations.add(operatorLocations.get(i));
+                    }
+                } else {
+                    computeLocations.clear();
+                    computeLocations.addAll(collectLocations);
+                }
+            }
+
+            List<String> storageLocations = new ArrayList<String>();
+            for (OperatorDescriptorId storageOpId : storageOperatorIds) {
+                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
+                if (operatorLocations == null) {
+                    continue;
+                }
+                int nOperatorInstances = operatorLocations.size();
+                for (int i = 0; i < nOperatorInstances; i++) {
+                    storageLocations.add(operatorLocations.get(i));
+                }
+            }
+            cInfo.setCollectLocations(collectLocations);
+            cInfo.setComputeLocations(computeLocations);
+            cInfo.setStorageLocations(storageLocations);
+
+        } catch (Exception e) {
+            e.printStackTrace();
+        }
+
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJoint.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJoint.java
new file mode 100644
index 0000000..e650a5b
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedJoint.java
@@ -0,0 +1,190 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+
+public class FeedJoint implements IFeedJoint {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJoint.class.getName());
+
+    /** A unique key associated with this FeedJoint **/
+    private final FeedJointKey key;
+
+    /** The state associated with the FeedJoint **/
+    private State state;
+
+    /** A list of subscribers that receive data from this FeedJoint **/
+    private final List<FeedConnectionId> receivers;
+
+    /** The id of the feed on which this FeedJoint resides **/
+    private final FeedId ownerFeedId;
+
+    /** A list of connection requests submitted for subscribing to this FeedJoint's data **/
+    private final List<FeedConnectionRequest> connectionRequests;
+
+    private final ConnectionLocation connectionLocation;
+
+    private final FeedJointType type;
+
+    private FeedConnectionId provider;
+
+    public FeedJoint(FeedJointKey key, FeedId ownerFeedId, ConnectionLocation subscriptionLocation, FeedJointType type,
+            FeedConnectionId provider) {
+        this.key = key;
+        this.ownerFeedId = ownerFeedId;
+        this.type = type;
+        this.receivers = new ArrayList<FeedConnectionId>();
+        this.state = State.CREATED;
+        this.connectionLocation = subscriptionLocation;
+        this.connectionRequests = new ArrayList<FeedConnectionRequest>();
+        this.provider = provider;
+    }
+
+    @Override
+    public int hashCode() {
+        return key.hashCode();
+    }
+
+    public void addReceiver(FeedConnectionId connectionId) {
+        receivers.add(connectionId);
+    }
+
+    public void removeReceiver(FeedConnectionId connectionId) {
+        receivers.remove(connectionId);
+    }
+
+    public synchronized void addConnectionRequest(FeedConnectionRequest request) {
+        connectionRequests.add(request);
+        if (state.equals(State.ACTIVE)) {
+            handlePendingConnectionRequest();
+        }
+    }
+
+    public synchronized void setState(State state) {
+        if (this.state.equals(state)) {
+            return;
+        }
+        this.state = state;
+        if (this.state.equals(State.ACTIVE)) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Feed joint " + this + " is now " + State.ACTIVE);
+            }
+            handlePendingConnectionRequest();
+        }
+    }
+
+    private void handlePendingConnectionRequest() {
+        for (FeedConnectionRequest connectionRequest : connectionRequests) {
+            FeedConnectionId connectionId = new FeedConnectionId(connectionRequest.getReceivingFeedId(),
+                    connectionRequest.getTargetDataset());
+            try {
+                FeedLifecycleListener.INSTANCE.submitFeedConnectionRequest(this, connectionRequest);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Submitted feed connection request " + connectionRequest + " at feed joint " + this);
+                }
+                addReceiver(connectionId);
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Unsuccessful attempt at submitting connection request " + connectionRequest
+                            + " at feed joint " + this + ". Message " + e.getMessage());
+                }
+                e.printStackTrace();
+            }
+        }
+        connectionRequests.clear();
+    }
+
+    public FeedConnectionId getReceiver(FeedConnectionId connectionId) {
+        for (FeedConnectionId cid : receivers) {
+            if (cid.equals(connectionId)) {
+                return cid;
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public String toString() {
+        return key.toString() + " [" + connectionLocation + "]" + "[" + state + "]";
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o == null) {
+            return false;
+        }
+        if (o == this) {
+            return true;
+        }
+        if (!(o instanceof FeedJoint)) {
+            return false;
+        }
+        return ((FeedJoint) o).getFeedJointKey().equals(this.key);
+    }
+
+    public FeedId getOwnerFeedId() {
+        return ownerFeedId;
+    }
+
+    public List<FeedConnectionRequest> getConnectionRequests() {
+        return connectionRequests;
+    }
+
+    public ConnectionLocation getConnectionLocation() {
+        return connectionLocation;
+    }
+
+    public FeedJointType getType() {
+        return type;
+    }
+
+    @Override
+    public FeedConnectionId getProvider() {
+        return provider;
+    }
+
+    public List<FeedConnectionId> getReceivers() {
+        return receivers;
+    }
+
+    public FeedJointKey getKey() {
+        return key;
+    }
+
+    public synchronized State getState() {
+        return state;
+    }
+
+    @Override
+    public FeedJointKey getFeedJointKey() {
+        return key;
+    }
+
+}
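
For orientation, a minimal sketch of how the buffering above behaves: a connection request added while the joint is still in the CREATED state is only queued, and is submitted (via FeedLifecycleListener.submitFeedConnectionRequest) once the joint is moved to ACTIVE. The helper method below is hypothetical and assumes the same imports as FeedJoint.java, plus the assumption that State is the nested enum declared on IFeedJoint.

    // Hypothetical illustration; not part of this change.
    static void connectWhenPipelineIsUp(FeedJoint joint, FeedConnectionRequest request) {
        // Queued only: the joint starts out in State.CREATED.
        joint.addConnectionRequest(request);
        // ... the intake/compute pipeline is scheduled elsewhere ...
        // Moving to ACTIVE flushes the pending request via handlePendingConnectionRequest().
        joint.setState(IFeedJoint.State.ACTIVE);
    }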

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLifecycleListener.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLifecycleListener.java
new file mode 100644
index 0000000..8e44af4
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLifecycleListener.java
@@ -0,0 +1,499 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.ExecutorService;
+import java.util.concurrent.Executors;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.common.api.IClusterManagementWork;
+import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
+import org.apache.asterix.common.api.IClusterManagementWorkResponse;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener;
+import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
+import org.apache.asterix.external.feed.management.FeedCollectInfo;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.management.FeedJointKey;
+import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
+import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
+import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.lang.common.statement.DataverseDecl;
+import org.apache.asterix.lang.common.statement.DisconnectFeedStatement;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.metadata.MetadataManager;
+import org.apache.asterix.metadata.MetadataTransactionContext;
+import org.apache.asterix.metadata.cluster.AddNodeWork;
+import org.apache.asterix.metadata.cluster.ClusterManager;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.exceptions.HyracksException;
+import org.apache.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+/**
+ * A listener that subscribes to events associated with cluster membership
+ * (nodes joining/leaving the cluster) and job lifecycle (start/end of a job).
+ * Subscribing to such events allows keeping track of feed ingestion jobs and
+ * taking any corrective action that may be required when a node involved in a
+ * feed leaves the cluster.
+ */
+public class FeedLifecycleListener implements IFeedLifecycleListener {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedLifecycleListener.class.getName());
+
+    public static final FeedLifecycleListener INSTANCE = new FeedLifecycleListener();
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    private final LinkedBlockingQueue<Message> jobEventInbox;
+    private final LinkedBlockingQueue<IClusterManagementWorkResponse> responseInbox;
+    private final Map<FeedCollectInfo, List<String>> dependentFeeds = new HashMap<FeedCollectInfo, List<String>>();
+    private final Map<FeedConnectionId, LinkedBlockingQueue<String>> feedReportQueue;
+    private final FeedJobNotificationHandler feedJobNotificationHandler;
+    private final FeedWorkRequestResponseHandler feedWorkRequestResponseHandler;
+    private final ExecutorService executorService;
+
+    private ClusterState state;
+
+    private FeedLifecycleListener() {
+        this.jobEventInbox = new LinkedBlockingQueue<Message>();
+        this.feedJobNotificationHandler = new FeedJobNotificationHandler(jobEventInbox);
+        this.responseInbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
+        this.feedWorkRequestResponseHandler = new FeedWorkRequestResponseHandler(responseInbox);
+        this.feedReportQueue = new HashMap<FeedConnectionId, LinkedBlockingQueue<String>>();
+        this.executorService = Executors.newCachedThreadPool();
+        this.executorService.execute(feedJobNotificationHandler);
+        this.executorService.execute(feedWorkRequestResponseHandler);
+        ClusterManager.INSTANCE.registerSubscriber(this);
+        this.state = AsterixClusterProperties.INSTANCE.getState();
+    }
+
+    @Override
+    public void notifyJobStart(JobId jobId) throws HyracksException {
+        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
+            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_START));
+        }
+    }
+
+    @Override
+    public void notifyJobFinish(JobId jobId) throws HyracksException {
+        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
+            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_FINISH));
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("NO NEED TO NOTIFY JOB FINISH!");
+            }
+        }
+    }
+
+    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedConnectJobInfo(connectionId);
+    }
+
+    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
+            IIntakeProgressTracker feedIntakeProgressTracker) {
+        feedJobNotificationHandler.registerFeedIntakeProgressTracker(connectionId, feedIntakeProgressTracker);
+    }
+
+    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
+        feedJobNotificationHandler.deregisterFeedIntakeProgressTracker(connectionId);
+    }
+
+    public void updateTrackingInformation(StorageReportFeedMessage srm) {
+        feedJobNotificationHandler.updateTrackingInformation(srm);
+    }
+
+    /*
+     * Traverse job specification to categorize job as a feed intake job or a feed collection job
+     */
+    @Override
+    public void notifyJobCreation(JobId jobId, IActivityClusterGraphGeneratorFactory acggf) throws HyracksException {
+        JobSpecification spec = acggf.getJobSpecification();
+        FeedConnectionId feedConnectionId = null;
+        Map<String, String> feedPolicy = null;
+        for (IOperatorDescriptor opDesc : spec.getOperatorMap().values()) {
+            if (opDesc instanceof FeedCollectOperatorDescriptor) {
+                feedConnectionId = ((FeedCollectOperatorDescriptor) opDesc).getFeedConnectionId();
+                feedPolicy = ((FeedCollectOperatorDescriptor) opDesc).getFeedPolicyProperties();
+                feedJobNotificationHandler.registerFeedCollectionJob(
+                        ((FeedCollectOperatorDescriptor) opDesc).getSourceFeedId(), feedConnectionId, jobId, spec,
+                        feedPolicy);
+                break;
+            } else if (opDesc instanceof FeedIntakeOperatorDescriptor) {
+                feedJobNotificationHandler.registerFeedIntakeJob(((FeedIntakeOperatorDescriptor) opDesc).getFeedId(),
+                        jobId, spec);
+                break;
+            }
+        }
+    }
+
+    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
+        feedJobNotificationHandler.setJobState(connectionId, jobState);
+    }
+
+    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedJobState(connectionId);
+    }
+
+    public static class Message {
+        public JobId jobId;
+
+        public enum MessageKind {
+            JOB_START,
+            JOB_FINISH
+        }
+
+        public MessageKind messageKind;
+
+        public Message(JobId jobId, MessageKind msgKind) {
+            this.jobId = jobId;
+            this.messageKind = msgKind;
+        }
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
+        Set<IClusterManagementWork> workToBeDone = new HashSet<IClusterManagementWork>();
+
+        Collection<FeedIntakeInfo> intakeInfos = feedJobNotificationHandler.getFeedIntakeInfos();
+        Collection<FeedConnectJobInfo> connectJobInfos = feedJobNotificationHandler.getFeedConnectInfos();
+
+        Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<String, List<FeedJobInfo>>();
+
+        for (String deadNode : deadNodeIds) {
+            for (FeedIntakeInfo intakeInfo : intakeInfos) {
+                if (intakeInfo.getIntakeLocation().contains(deadNode)) {
+                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
+                    if (infos == null) {
+                        infos = new ArrayList<FeedJobInfo>();
+                        impactedJobs.put(deadNode, infos);
+                    }
+                    infos.add(intakeInfo);
+                    intakeInfo.setState(FeedJobState.UNDER_RECOVERY);
+                }
+            }
+
+            for (FeedConnectJobInfo connectInfo : connectJobInfos) {
+                if (connectInfo.getStorageLocations().contains(deadNode)) {
+                    continue;
+                }
+                if (connectInfo.getComputeLocations().contains(deadNode)
+                        || connectInfo.getCollectLocations().contains(deadNode)) {
+                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
+                    if (infos == null) {
+                        infos = new ArrayList<FeedJobInfo>();
+                        impactedJobs.put(deadNode, infos);
+                    }
+                    infos.add(connectInfo);
+                    connectInfo.setState(FeedJobState.UNDER_RECOVERY);
+                    feedJobNotificationHandler.deregisterFeedActivity(connectInfo);
+                }
+            }
+
+        }
+
+        if (impactedJobs.size() > 0) {
+            AddNodeWork addNodeWork = new AddNodeWork(deadNodeIds, deadNodeIds.size(), this);
+            feedWorkRequestResponseHandler.registerFeedWork(addNodeWork.getWorkId(), impactedJobs);
+            workToBeDone.add(addNodeWork);
+        }
+        return workToBeDone;
+
+    }
+
+    public static class FailureReport {
+
+        private final List<Pair<FeedConnectJobInfo, List<String>>> recoverableConnectJobs;
+        private final Map<IFeedJoint, List<String>> recoverableIntakeFeedIds;
+
+        public FailureReport(Map<IFeedJoint, List<String>> recoverableIntakeFeedIds,
+                List<Pair<FeedConnectJobInfo, List<String>>> recoverableSubscribers) {
+            this.recoverableConnectJobs = recoverableSubscribers;
+            this.recoverableIntakeFeedIds = recoverableIntakeFeedIds;
+        }
+
+        public List<Pair<FeedConnectJobInfo, List<String>>> getRecoverableSubscribers() {
+            return recoverableConnectJobs;
+        }
+
+        public Map<IFeedJoint, List<String>> getRecoverableIntakeFeedIds() {
+            return recoverableIntakeFeedIds;
+        }
+
+    }
+
+    @Override
+    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
+        ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info(joinedNodeId + " joined the cluster. " + "Asterix state: " + newState);
+        }
+
+        boolean needToReActivateFeeds = !newState.equals(state) && (newState == ClusterState.ACTIVE);
+        if (needToReActivateFeeds) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(joinedNodeId + " Resuming loser feeds (if any)");
+            }
+            try {
+                FeedsActivator activator = new FeedsActivator();
+                (new Thread(activator)).start();
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Exception in resuming feeds" + e.getMessage());
+                }
+            }
+            state = newState;
+        } else {
+            List<FeedCollectInfo> feedsThatCanBeRevived = new ArrayList<FeedCollectInfo>();
+            for (Entry<FeedCollectInfo, List<String>> entry : dependentFeeds.entrySet()) {
+                List<String> requiredNodeIds = entry.getValue();
+                if (requiredNodeIds.contains(joinedNodeId)) {
+                    requiredNodeIds.remove(joinedNodeId);
+                    if (requiredNodeIds.isEmpty()) {
+                        feedsThatCanBeRevived.add(entry.getKey());
+                    }
+                }
+            }
+            if (!feedsThatCanBeRevived.isEmpty()) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(joinedNodeId + " Resuming feeds after rejoining of node " + joinedNodeId);
+                }
+                FeedsActivator activator = new FeedsActivator(feedsThatCanBeRevived);
+                (new Thread(activator)).start();
+            }
+        }
+        return null;
+    }
+
+    @Override
+    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
+        try {
+            responseInbox.put(response);
+        } catch (InterruptedException e) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Interrupted exception");
+            }
+        }
+    }
+
+    @Override
+    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
+        switch (newState) {
+            case ACTIVE:
+                if (previousState.equals(ClusterState.UNUSABLE)) {
+                    try {
+                        // TODO: Figure out why code was commented
+                        // FeedsActivator activator = new FeedsActivator();
+                        // (new Thread(activator)).start();
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Exception in resuming feeds" + e.getMessage());
+                        }
+                    }
+                }
+                break;
+            default:
+                break;
+        }
+
+    }
+
+    public static class FeedsDeActivator implements Runnable {
+
+        private List<FeedConnectJobInfo> failedConnectjobs;
+
+        public FeedsDeActivator(List<FeedConnectJobInfo> failedConnectjobs) {
+            this.failedConnectjobs = failedConnectjobs;
+        }
+
+        @Override
+        public void run() {
+            for (FeedConnectJobInfo failedConnectJob : failedConnectjobs) {
+                endFeed(failedConnectJob);
+            }
+        }
+
+        private void endFeed(FeedConnectJobInfo cInfo) {
+            MetadataTransactionContext ctx = null;
+            PrintWriter writer = new PrintWriter(System.out, true);
+            SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+
+            try {
+                ctx = MetadataManager.INSTANCE.beginTransaction();
+                FeedId feedId = cInfo.getConnectionId().getFeedId();
+                DisconnectFeedStatement stmt = new DisconnectFeedStatement(new Identifier(feedId.getDataverse()),
+                        new Identifier(feedId.getFeedName()), new Identifier(cInfo.getConnectionId().getDatasetName()));
+                List<Statement> statements = new ArrayList<Statement>();
+                DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(feedId.getDataverse()));
+                statements.add(dataverseDecl);
+                statements.add(stmt);
+                QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+                translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                        QueryTranslator.ResultDelivery.SYNC);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("End irrecoverable feed: " + cInfo.getConnectionId());
+                }
+                MetadataManager.INSTANCE.commitTransaction(ctx);
+            } catch (Exception e) {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Exception in ending loser feed: " + cInfo.getConnectionId() + " Exception "
+                            + e.getMessage());
+                }
+                e.printStackTrace();
+                try {
+                    MetadataManager.INSTANCE.abortTransaction(ctx);
+                } catch (Exception e2) {
+                    e2.addSuppressed(e);
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Exception in aborting transaction! System is in inconsistent state");
+                    }
+                }
+
+            }
+
+        }
+    }
+
+    public void submitFeedConnectionRequest(IFeedJoint feedPoint, FeedConnectionRequest subscriptionRequest)
+            throws Exception {
+        feedJobNotificationHandler.submitFeedConnectionRequest(feedPoint, subscriptionRequest);
+    }
+
+    @Override
+    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId) {
+        List<FeedConnectionId> connections = new ArrayList<FeedConnectionId>();
+        Collection<FeedConnectionId> activeConnections = feedJobNotificationHandler.getActiveFeedConnections();
+        if (feedId != null) {
+            for (FeedConnectionId connectionId : activeConnections) {
+                if (connectionId.getFeedId().equals(feedId)) {
+                    connections.add(connectionId);
+                }
+            }
+        } else {
+            connections.addAll(activeConnections);
+        }
+        return connections;
+    }
+
+    @Override
+    public List<String> getComputeLocations(FeedId feedId) {
+        return feedJobNotificationHandler.getFeedComputeLocations(feedId);
+    }
+
+    @Override
+    public List<String> getIntakeLocations(FeedId feedId) {
+        return feedJobNotificationHandler.getFeedIntakeLocations(feedId);
+    }
+
+    @Override
+    public List<String> getStoreLocations(FeedConnectionId feedConnectionId) {
+        return feedJobNotificationHandler.getFeedStorageLocations(feedConnectionId);
+    }
+
+    @Override
+    public List<String> getCollectLocations(FeedConnectionId feedConnectionId) {
+        return feedJobNotificationHandler.getFeedCollectLocations(feedConnectionId);
+    }
+
+    @Override
+    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.isFeedConnectionActive(connectionId);
+    }
+
+    public void reportPartialDisconnection(FeedConnectionId connectionId) {
+        feedJobNotificationHandler.removeFeedJointsPostPipelineTermination(connectionId);
+    }
+
+    public void registerFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+        feedReportQueue.put(feedId, queue);
+    }
+
+    public void deregisterFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
+        feedReportQueue.remove(feedId);
+    }
+
+    public LinkedBlockingQueue<String> getFeedReportQueue(FeedConnectionId feedId) {
+        return feedReportQueue.get(feedId);
+    }
+
+    @Override
+    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.getAvailableFeedJoint(feedJointKey);
+    }
+
+    @Override
+    public boolean isFeedJointAvailable(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.isFeedPointAvailable(feedJointKey);
+    }
+
+    public void registerFeedJoint(IFeedJoint feedJoint) {
+        feedJobNotificationHandler.registerFeedJoint(feedJoint);
+    }
+
+    public IFeedJoint getFeedJoint(FeedJointKey feedJointKey) {
+        return feedJobNotificationHandler.getFeedJoint(feedJointKey);
+    }
+
+    @Override
+    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        feedJobNotificationHandler.registerFeedEventSubscriber(connectionId, subscriber);
+    }
+
+    @Override
+    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
+        feedJobNotificationHandler.deregisterFeedEventSubscriber(connectionId, subscriber);
+
+    }
+
+    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getCollectJobSpecification(connectionId);
+    }
+
+    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
+        return feedJobNotificationHandler.getFeedCollectJobId(connectionId);
+    }
+
+}
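
As a usage sketch (the helper below is hypothetical; it only calls accessors defined in the listener above and FeedConnectionId.getFeedId(), which the listener itself uses), a client holding a FeedConnectionId can ask the singleton where the connected feed currently runs:

    // Hypothetical helper; not part of this change.
    static String describeFeedPlacement(FeedConnectionId connectionId) {
        FeedLifecycleListener listener = FeedLifecycleListener.INSTANCE;
        if (!listener.isFeedConnectionActive(connectionId)) {
            return connectionId + " is not active";
        }
        List<String> collect = listener.getCollectLocations(connectionId);
        List<String> compute = listener.getComputeLocations(connectionId.getFeedId());
        List<String> store = listener.getStoreLocations(connectionId);
        return connectionId + ": collect=" + collect + ", compute=" + compute + ", store=" + store;
    }

FeedLoadManager (below) relies on the same three accessors when it decides which nodes must receive a PrepareStallMessage.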

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLoadManager.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLoadManager.java
new file mode 100644
index 0000000..5a590b4
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedLoadManager.java
@@ -0,0 +1,301 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.util.ArrayList;
+import java.util.Collection;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeSet;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.feed.api.IFeedLoadManager;
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedCongestionMessage;
+import org.apache.asterix.external.feed.message.FeedReportMessage;
+import org.apache.asterix.external.feed.message.PrepareStallMessage;
+import org.apache.asterix.external.feed.message.ScaleInReportMessage;
+import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
+import org.apache.asterix.external.feed.watch.FeedActivity;
+import org.apache.asterix.external.feed.watch.NodeLoadReport;
+import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
+import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedLoadManager implements IFeedLoadManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedLoadManager.class.getName());
+
+    private static final long MIN_MODIFICATION_INTERVAL = 180000; // 3 minutes
+    private final TreeSet<NodeLoadReport> nodeReports;
+    private final Map<FeedConnectionId, FeedActivity> feedActivities;
+    private final Map<String, Pair<Integer, Integer>> feedMetrics;
+
+    private FeedConnectionId lastModified;
+    private long lastModifiedTimestamp;
+
+    private static final int UNKNOWN = -1;
+
+    public FeedLoadManager() {
+        this.nodeReports = new TreeSet<NodeLoadReport>();
+        this.feedActivities = new HashMap<FeedConnectionId, FeedActivity>();
+        this.feedMetrics = new HashMap<String, Pair<Integer, Integer>>();
+    }
+
+    @Override
+    public void submitNodeLoadReport(NodeLoadReport report) {
+        nodeReports.remove(report);
+        nodeReports.add(report);
+    }
+
+    @Override
+    public void reportCongestion(FeedCongestionMessage message) throws AsterixException {
+        FeedRuntimeId runtimeId = message.getRuntimeId();
+        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
+        if (jobState == null
+                || (jobState.equals(FeedJobState.UNDER_RECOVERY))
+                || (message.getConnectionId().equals(lastModified) && System.currentTimeMillis()
+                        - lastModifiedTimestamp < MIN_MODIFICATION_INTERVAL)) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Ignoring congestion report from " + runtimeId);
+            }
+            return;
+        } else {
+            try {
+                FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
+                int inflowRate = message.getInflowRate();
+                int outflowRate = message.getOutflowRate();
+                List<String> currentComputeLocations = new ArrayList<String>();
+                currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message
+                        .getConnectionId().getFeedId()));
+                int computeCardinality = currentComputeLocations.size();
+                int requiredCardinality = (int) Math
+                        .ceil((double) ((computeCardinality * inflowRate) / (double) outflowRate)) + 5;
+                int additionalComputeNodes = requiredCardinality - computeCardinality;
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("INCREASING COMPUTE CARDINALITY from " + computeCardinality + " by "
+                            + additionalComputeNodes);
+                }
+
+                List<String> helperComputeNodes = getNodeForSubstitution(additionalComputeNodes);
+
+                // Step 1) Alter the original feed job to adjust the cardinality
+                JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
+                        .getConnectionId());
+                helperComputeNodes.addAll(currentComputeLocations);
+                List<String> newLocations = new ArrayList<String>();
+                newLocations.addAll(currentComputeLocations);
+                newLocations.addAll(helperComputeNodes);
+                FeedMetadataUtil.increaseCardinality(jobSpec, FeedRuntimeType.COMPUTE, requiredCardinality, newLocations);
+
+                // Step 2) send prepare-to-stall message
+                gracefullyTerminateDataFlow(message.getConnectionId(), Integer.MAX_VALUE);
+
+                // Step 3) run the altered job specification
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("New Job after adjusting to the workload " + jobSpec);
+                }
+
+                Thread.sleep(10000);
+                runJob(jobSpec, false);
+                lastModified = message.getConnectionId();
+                lastModifiedTimestamp = System.currentTimeMillis();
+
+            } catch (Exception e) {
+                e.printStackTrace();
+                if (LOGGER.isLoggable(Level.SEVERE)) {
+                    LOGGER.severe("Unable to form the required job for scaling in/out" + e.getMessage());
+                }
+                throw new AsterixException(e);
+            }
+        }
+    }
+
+    @Override
+    public void submitScaleInPossibleReport(ScaleInReportMessage message) throws Exception {
+        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
+        if (jobState == null || (jobState.equals(FeedJobState.UNDER_RECOVERY))) {
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("JobState information for job " + "[" + message.getConnectionId() + "]" + " not found ");
+            }
+            return;
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Processing scale-in message " + message);
+            }
+            FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
+            JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
+                    .getConnectionId());
+            int reducedCardinality = message.getReducedCardinaliy();
+            List<String> currentComputeLocations = new ArrayList<String>();
+            currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message.getConnectionId()
+                    .getFeedId()));
+            FeedMetadataUtil.decreaseComputeCardinality(jobSpec, FeedRuntimeType.COMPUTE, reducedCardinality,
+                    currentComputeLocations);
+
+            gracefullyTerminateDataFlow(message.getConnectionId(), reducedCardinality - 1);
+            Thread.sleep(3000);
+            JobId newJobId = runJob(jobSpec, false);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Launch modified job" + "[" + newJobId + "]" + "for scale-in \n" + jobSpec);
+            }
+
+        }
+    }
+
+    private void gracefullyTerminateDataFlow(FeedConnectionId connectionId, int computePartitionRetainLimit)
+            throws Exception {
+        // Step 1) send prepare-to-stall message
+        PrepareStallMessage stallMessage = new PrepareStallMessage(connectionId, computePartitionRetainLimit);
+        List<String> intakeLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+        List<String> computeLocations = FeedLifecycleListener.INSTANCE.getComputeLocations(connectionId.getFeedId());
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+
+        Set<String> operatorLocations = new HashSet<String>();
+
+        operatorLocations.addAll(intakeLocations);
+        operatorLocations.addAll(computeLocations);
+        operatorLocations.addAll(storageLocations);
+
+        JobSpecification messageJobSpec = FeedOperations.buildPrepareStallMessageJob(stallMessage, operatorLocations);
+        runJob(messageJobSpec, true);
+
+        // Step 2)
+        TerminateDataFlowMessage terminateMesg = new TerminateDataFlowMessage(connectionId);
+        messageJobSpec = FeedOperations.buildTerminateFlowMessageJob(terminateMesg, intakeLocations);
+        runJob(messageJobSpec, true);
+    }
+
+    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobId jobId = hcc.startJob(spec);
+        if (waitForCompletion) {
+            hcc.waitForCompletion(jobId);
+        }
+        return jobId;
+    }
+
+    @Override
+    public void submitFeedRuntimeReport(FeedReportMessage report) {
+        String key = "" + report.getConnectionId() + ":" + report.getRuntimeId().getFeedRuntimeType();
+        Pair<Integer, Integer> value = feedMetrics.get(key);
+        if (value == null) {
+            value = new Pair<Integer, Integer>(report.getValue(), 1);
+            feedMetrics.put(key, value);
+        } else {
+            value.first = value.first + report.getValue();
+            value.second = value.second + 1;
+        }
+    }
+
+    @Override
+    public int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType) {
+        int rVal;
+        String key = "" + connectionId + ":" + runtimeType;
+        Pair<Integer, Integer> value = feedMetrics.get(key);
+        if (value == null) {
+            rVal = UNKNOWN;
+        } else {
+            rVal = value.first / value.second;
+        }
+        return rVal;
+    }
+
+    private List<String> getNodeForSubstitution(int nRequired) {
+        List<String> nodeIds = new ArrayList<String>();
+        Iterator<NodeLoadReport> it = null;
+        int nAdded = 0;
+        while (nAdded < nRequired) {
+            it = nodeReports.iterator();
+            while (it.hasNext() && nAdded < nRequired) {
+                nodeIds.add(it.next().getNodeId());
+                nAdded++;
+            }
+        }
+        return nodeIds;
+    }
+
+    @Override
+    public synchronized List<String> getNodes(int required) {
+        Iterator<NodeLoadReport> it;
+        List<String> allocated = new ArrayList<String>();
+        while (allocated.size() < required) {
+            it = nodeReports.iterator();
+            while (it.hasNext() && allocated.size() < required) {
+                allocated.add(it.next().getNodeId());
+            }
+        }
+        return allocated;
+    }
+
+    @Override
+    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception {
+        System.out.println("Throttling Enabled for " + mesg.getConnectionId() + " " + mesg.getFeedRuntimeId());
+        FeedConnectionId connectionId = mesg.getConnectionId();
+        List<String> destinationLocations = new ArrayList<String>();
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+
+        destinationLocations.addAll(storageLocations);
+        destinationLocations.addAll(collectLocations);
+        JobSpecification messageJobSpec = FeedOperations.buildNotifyThrottlingEnabledMessageJob(mesg,
+                destinationLocations);
+        runJob(messageJobSpec, true);
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Acking disabled for " + mesg.getConnectionId() + " in view of activated throttling");
+        }
+        IFeedTrackingManager trackingManager = CentralFeedManager.getInstance().getFeedTrackingManager();
+        trackingManager.disableAcking(connectionId);
+    }
+
+    @Override
+    public void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity) {
+        feedActivities.put(connectionId, activity);
+    }
+
+    @Override
+    public FeedActivity getFeedActivity(FeedConnectionId connectionId) {
+        return feedActivities.get(connectionId);
+    }
+
+    @Override
+    public Collection<FeedActivity> getFeedActivities() {
+        return feedActivities.values();
+    }
+
+    @Override
+    public void removeFeedActivity(FeedConnectionId connectionId) {
+        feedActivities.remove(connectionId);
+    }
+}
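
The scale-out arithmetic in reportCongestion is easy to miss inside the try block; the sketch below restates it (the method name and the sample rates are illustrative only, while the +5 headroom comes straight from the code above):

    // Restates: requiredCardinality = ceil(computeCardinality * inflowRate / outflowRate) + 5
    static int requiredComputeCardinality(int computeCardinality, int inflowRate, int outflowRate) {
        return (int) Math.ceil((computeCardinality * inflowRate) / (double) outflowRate) + 5;
    }
    // Example: 4 compute partitions with inflowRate=300 and outflowRate=200 gives
    // ceil(4 * 300 / 200.0) + 5 = 6 + 5 = 11, so 7 additional compute nodes are requested.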

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedMessageReceiver.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedMessageReceiver.java
new file mode 100644
index 0000000..bff1a4d
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedMessageReceiver.java
@@ -0,0 +1,96 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.util.logging.Level;
+
+import org.apache.asterix.app.external.CentralFeedManager.AQLExecutor;
+import org.apache.asterix.external.feed.api.IFeedLoadManager;
+import org.apache.asterix.external.feed.api.IFeedMessage.MessageType;
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.message.FeedCongestionMessage;
+import org.apache.asterix.external.feed.message.FeedReportMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
+import org.apache.asterix.external.feed.message.MessageReceiver;
+import org.apache.asterix.external.feed.message.ScaleInReportMessage;
+import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.watch.NodeLoadReport;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.hyracks.bootstrap.FeedBootstrap;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.json.JSONObject;
+
+public class FeedMessageReceiver extends MessageReceiver<String> {
+
+    private static boolean initialized;
+
+    private final IFeedLoadManager feedLoadManager;
+    private final IFeedTrackingManager feedTrackingManager;
+
+    public FeedMessageReceiver(CentralFeedManager centralFeedManager) {
+        this.feedLoadManager = centralFeedManager.getFeedLoadManager();
+        this.feedTrackingManager = centralFeedManager.getFeedTrackingManager();
+    }
+
+    @Override
+    public void processMessage(String message) throws Exception {
+        JSONObject obj = new JSONObject(message);
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Received message " + obj);
+        }
+        MessageType messageType = MessageType.valueOf(obj.getString(FeedConstants.MessageConstants.MESSAGE_TYPE));
+        switch (messageType) {
+            case XAQL:
+                if (!initialized) {
+                    FeedBootstrap.setUpInitialArtifacts();
+                    initialized = true;
+                }
+                AQLExecutor.executeAQL(obj.getString(FeedConstants.MessageConstants.AQL));
+                break;
+            case CONGESTION:
+                feedLoadManager.reportCongestion(FeedCongestionMessage.read(obj));
+                break;
+            case FEED_REPORT:
+                feedLoadManager.submitFeedRuntimeReport(FeedReportMessage.read(obj));
+                break;
+            case NODE_REPORT:
+                feedLoadManager.submitNodeLoadReport(NodeLoadReport.read(obj));
+                break;
+            case SCALE_IN_REQUEST:
+                feedLoadManager.submitScaleInPossibleReport(ScaleInReportMessage.read(obj));
+                break;
+            case STORAGE_REPORT:
+                FeedLifecycleListener.INSTANCE.updateTrackingInformation(StorageReportFeedMessage.read(obj));
+                break;
+            case COMMIT_ACK:
+                feedTrackingManager.submitAckReport(FeedTupleCommitAckMessage.read(obj));
+                break;
+            case THROTTLING_ENABLED:
+                feedLoadManager.reportThrottlingEnabled(ThrottlingEnabledFeedMessage.read(obj));
+                break;
+            default:
+                break;
+        }
+
+    }
+
+    @Override
+    public void emptyInbox() throws HyracksDataException {
+    }
+}
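
For context, a minimal sketch of the wire format processMessage expects, inferred from the dispatch above: the receiver keys its switch solely on the message-type field, and the remaining payload fields are specific to each message class (NodeLoadReport.read and friends), so they are not shown here. The helper method is hypothetical.

    // Hypothetical illustration; assumes the same imports as FeedMessageReceiver.java above.
    static void sendNodeReport(FeedMessageReceiver receiver) throws Exception {
        JSONObject obj = new JSONObject();
        // processMessage resolves this field via MessageType.valueOf(...).
        obj.put(FeedConstants.MessageConstants.MESSAGE_TYPE, MessageType.NODE_REPORT.name());
        // ... NodeLoadReport-specific payload fields would be added here ...
        receiver.processMessage(obj.toString());
    }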


[07/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
new file mode 100644
index 0000000..2882083
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParser.java
@@ -0,0 +1,1783 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.library;
+
+import java.io.DataOutput;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.BitSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.asterix.builders.AbvsBuilderFactory;
+import org.apache.asterix.builders.IARecordBuilder;
+import org.apache.asterix.builders.IAsterixListBuilder;
+import org.apache.asterix.builders.ListBuilderFactory;
+import org.apache.asterix.builders.OrderedListBuilder;
+import org.apache.asterix.builders.RecordBuilderFactory;
+import org.apache.asterix.builders.UnorderedListBuilder;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.api.IRecordDataParser;
+import org.apache.asterix.external.classad.AMutableCharArrayString;
+import org.apache.asterix.external.classad.AMutableNumberFactor;
+import org.apache.asterix.external.classad.AttributeReference;
+import org.apache.asterix.external.classad.CaseInsensitiveString;
+import org.apache.asterix.external.classad.CharArrayLexerSource;
+import org.apache.asterix.external.classad.ClassAd;
+import org.apache.asterix.external.classad.ExprList;
+import org.apache.asterix.external.classad.ExprTree;
+import org.apache.asterix.external.classad.ExprTree.NodeKind;
+import org.apache.asterix.external.classad.ExprTreeHolder;
+import org.apache.asterix.external.classad.FileLexerSource;
+import org.apache.asterix.external.classad.FunctionCall;
+import org.apache.asterix.external.classad.InputStreamLexerSource;
+import org.apache.asterix.external.classad.Lexer;
+import org.apache.asterix.external.classad.Lexer.TokenType;
+import org.apache.asterix.external.classad.LexerSource;
+import org.apache.asterix.external.classad.Literal;
+import org.apache.asterix.external.classad.Operation;
+import org.apache.asterix.external.classad.StringLexerSource;
+import org.apache.asterix.external.classad.TokenValue;
+import org.apache.asterix.external.classad.Value;
+import org.apache.asterix.external.classad.Value.NumberFactor;
+import org.apache.asterix.external.classad.object.pool.AttributeReferencePool;
+import org.apache.asterix.external.classad.object.pool.BitSetPool;
+import org.apache.asterix.external.classad.object.pool.ClassAdPool;
+import org.apache.asterix.external.classad.object.pool.ExprHolderPool;
+import org.apache.asterix.external.classad.object.pool.ExprListPool;
+import org.apache.asterix.external.classad.object.pool.LiteralPool;
+import org.apache.asterix.external.classad.object.pool.OperationPool;
+import org.apache.asterix.external.classad.object.pool.TokenValuePool;
+import org.apache.asterix.external.classad.object.pool.ValuePool;
+import org.apache.asterix.external.parser.AbstractDataParser;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.om.base.ABoolean;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.types.AOrderedListType;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.AUnionType;
+import org.apache.asterix.om.types.AUnorderedListType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.om.types.hierachy.ATypeHierarchy;
+import org.apache.asterix.om.util.NonTaggedFormatUtil;
+import org.apache.asterix.om.util.container.IObjectPool;
+import org.apache.asterix.om.util.container.ListObjectPool;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.hyracks.data.std.api.IMutableValueStorage;
+import org.apache.hyracks.data.std.util.ArrayBackedValueStorage;
+
+/**
+ * Reads ClassAd strings from various sources (Strings, Files, and InputStreams)
+ * and converts them into a ClassAd.
+ */
+public class ClassAdParser extends AbstractDataParser implements IRecordDataParser<char[]> {
+
+    // reusable components
+    private Lexer lexer = new Lexer();
+    private LexerSource currentSource = null;
+    private boolean isExpr = false;
+    // object pools
+    private final ExprHolderPool mutableExprPool = new ExprHolderPool();
+    private final TokenValuePool tokenValuePool = new TokenValuePool();
+    private final ClassAdPool classAdPool = new ClassAdPool();
+    private final ExprListPool exprListPool = new ExprListPool();
+    private final ValuePool valuePool = new ValuePool();
+    private final LiteralPool literalPool = new LiteralPool();
+    private final BitSetPool bitSetPool = new BitSetPool();
+    private final OperationPool operationPool = new OperationPool();
+    private final AttributeReferencePool attrRefPool = new AttributeReferencePool();
+    // asterix objects
+    private ARecordType recordType;
+    private IObjectPool<IARecordBuilder, ATypeTag> recordBuilderPool = new ListObjectPool<IARecordBuilder, ATypeTag>(
+            new RecordBuilderFactory());
+    private IObjectPool<IAsterixListBuilder, ATypeTag> listBuilderPool = new ListObjectPool<IAsterixListBuilder, ATypeTag>(
+            new ListBuilderFactory());
+    private IObjectPool<IMutableValueStorage, ATypeTag> abvsBuilderPool = new ListObjectPool<IMutableValueStorage, ATypeTag>(
+            new AbvsBuilderFactory());
+    private ClassAd rootAd = new ClassAd(false, true);
+    private String exprPrefix = "expr=";
+    private String exprSuffix = "";
+    private boolean evaluateExpr = true;
+    private String exprFieldNameSuffix = "Expr";
+    private boolean keepBoth = true;
+    private boolean oldFormat = false;
+    private StringLexerSource stringLexerSource = new StringLexerSource("");
+
+    public ClassAdParser(ARecordType recordType) {
+        this.recordType = recordType;
+        this.currentSource = new CharArrayLexerSource();
+    }
+
+    public ClassAdParser() {
+        this.recordType = null;
+        this.currentSource = new CharArrayLexerSource();
+    }
+
+    /***********************************
+     * AsterixDB Specific begin
+     *
+     * @throws AsterixException
+     ***********************************/
+    public void asterixParse(ClassAd classad, DataOutput out) throws IOException, AsterixException {
+        // we assume the lexer source used here is a char array
+        parseClassAd(currentSource, classad, false);
+        parseRecord(null, classad, out);
+    }
+
+    public void handleErrorParsing() throws IOException {
+    }
+
+    private boolean asterixParseClassAd(ClassAd ad) throws IOException {
+        TokenType tt;
+        ad.clear();
+        lexer.initialize(currentSource);
+        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_BOX) {
+            handleErrorParsing();
+            return false;
+        }
+        tt = lexer.peekToken();
+        TokenValue tv = tokenValuePool.get();
+        ExprTreeHolder tree = mutableExprPool.get();
+        while (tt != TokenType.LEX_CLOSE_BOX) {
+            // Get the name of the expression
+            tv.reset();
+            tree.reset();
+            tt = lexer.consumeToken(tv);
+            if (tt == TokenType.LEX_SEMICOLON) {
+                // We allow empty expressions, so a stray double semicolon does no
+                // harm. Technically it's not correct, but we shouldn't make users
+                // pay the price for a meaningless mistake. See condor-support #1881
+                // for a user who was bitten by this.
+                continue;
+            }
+            if (tt != TokenType.LEX_IDENTIFIER) {
+                throw new HyracksDataException(
+                        "while parsing classad:  expected LEX_IDENTIFIER " + " but got " + Lexer.strLexToken(tt));
+            }
+
+            // consume the intermediate '='
+            if ((tt = lexer.consumeToken()) != TokenType.LEX_BOUND_TO) {
+                throw new HyracksDataException(
+                        "while parsing classad:  expected LEX_BOUND_TO " + " but got " + Lexer.strLexToken(tt));
+            }
+
+            int positionBefore = lexer.getLexSource().getPosition();
+            isExpr = false;
+            // parse the expression
+            parseExpression(tree);
+            if (tree.getInnerTree() == null) {
+                handleErrorParsing();
+                throw new HyracksDataException("parse expression returned empty tree");
+            }
+
+            if ((!evaluateExpr || keepBoth) && isExpr && positionBefore >= 0) {
+                // we will store a string representation of the expression
+                int len = lexer.getLexSource().getPosition() - positionBefore;
+                // add it as it is to the classAd
+                Literal lit = literalPool.get();
+                Value exprVal = valuePool.get();
+                exprVal.setStringValue(exprPrefix
+                        + String.valueOf(lexer.getLexSource().getBuffer(), positionBefore, len) + exprSuffix);
+                Literal.createLiteral(lit, exprVal, NumberFactor.NO_FACTOR);
+                if (!evaluateExpr) {
+                    ad.insert(tv.getStrValue().toString(), lit);
+                } else {
+                    ad.insert(tv.getStrValue().toString() + exprFieldNameSuffix, lit);
+                }
+            }
+            if (!isExpr || (evaluateExpr)) {
+                // insert the attribute into the classad
+                if (!ad.insert(tv.getStrValue().toString(), tree)) {
+                    handleErrorParsing();
+                    throw new HyracksDataException("Couldn't insert value to classad");
+                }
+            }
+            // the next token must be a ';' or a ']'
+            tt = lexer.peekToken();
+            if (tt != TokenType.LEX_SEMICOLON && tt != TokenType.LEX_CLOSE_BOX) {
+                handleErrorParsing();
+                throw new HyracksDataException("while parsing classad:  expected LEX_SEMICOLON or "
+                        + "LEX_CLOSE_BOX but got " + Lexer.strLexToken(tt));
+            }
+
+            // Slurp up any extra semicolons. This does not duplicate the work at the
+            // top of the loop because it accounts for the case where the last
+            // expression has extra semicolons, while the first case accounts for
+            // optional beginning semicolons.
+            while (tt == TokenType.LEX_SEMICOLON) {
+                lexer.consumeToken();
+                tt = lexer.peekToken();
+            }
+        }
+        return true;
+    }
+
+    public static String readLine(char[] buffer, AMutableInt32 offset, int maxOffset) {
+        int position = offset.getIntegerValue();
+        while (position < maxOffset && buffer[position] != '\n') {
+            position++;
+        }
+        if (offset.getIntegerValue() == position) {
+            return null;
+        }
+        String line = String.valueOf(buffer, offset.getIntegerValue(), position - offset.getIntegerValue());
+        position++;
+        offset.setValue(position);
+        return line;
+    }
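+
+    /*
+     * Example (illustrative): with buffer = "a = 1\nb = 2\n".toCharArray(), an
+     * offset of 0 and maxOffset = buffer.length, successive calls return "a = 1"
+     * and then "b = 2", advancing the offset past each newline; a null return
+     * means no characters were consumed.
+     */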
+
+    private AMutableInt32 aInt32 = new AMutableInt32(0);
+
+    /**
+     * Resets the pools before parsing a top-level record. In this way the
+     * elements in those pools can be re-used.
+     */
+    private void resetPools() {
+        listBuilderPool.reset();
+        recordBuilderPool.reset();
+        abvsBuilderPool.reset();
+        mutableExprPool.reset();
+        tokenValuePool.reset();
+        classAdPool.reset();
+        exprListPool.reset();
+        valuePool.reset();
+        literalPool.reset();
+        bitSetPool.reset();
+        operationPool.reset();
+        attrRefPool.reset();
+    }
+
+    private ATypeTag getTargetTypeTag(ATypeTag expectedTypeTag, IAType aObjectType) throws IOException {
+        if (aObjectType == null) {
+            return expectedTypeTag;
+        }
+        if (aObjectType.getTypeTag() != ATypeTag.UNION) {
+            final ATypeTag typeTag = aObjectType.getTypeTag();
+            if (ATypeHierarchy.canPromote(expectedTypeTag, typeTag)
+                    || ATypeHierarchy.canDemote(expectedTypeTag, typeTag)) {
+                return typeTag;
+            } else {
+                return null;
+            }
+        } else { // union
+            List<IAType> unionList = ((AUnionType) aObjectType).getUnionList();
+            for (IAType t : unionList) {
+                final ATypeTag typeTag = t.getTypeTag();
+                if (ATypeHierarchy.canPromote(expectedTypeTag, typeTag)
+                        || ATypeHierarchy.canDemote(expectedTypeTag, typeTag)) {
+                    return typeTag;
+                }
+            }
+        }
+        return null;
+    }
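+
+    /*
+     * Example (illustrative): with an expected tag of INT64 and a field declared as
+     * DOUBLE, this returns DOUBLE because INT64 can be promoted; for a field
+     * declared as UNION(NULL, DOUBLE) it returns DOUBLE by scanning the union
+     * members. A null result means no promotion or demotion applies, so callers
+     * report a type mismatch.
+     */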
+
+    private void parseRecord(ARecordType recType, ClassAd pAd, DataOutput out) throws IOException, AsterixException {
+        ArrayBackedValueStorage fieldValueBuffer = getTempBuffer();
+        ArrayBackedValueStorage fieldNameBuffer = getTempBuffer();
+        IARecordBuilder recBuilder = getRecordBuilder();
+        BitSet nulls = null;
+        if (recType != null) {
+            nulls = getBitSet();
+            recBuilder.reset(recType);
+        } else {
+            recBuilder.reset(null);
+        }
+        recBuilder.init();
+        boolean openRecordField = false;
+        int fieldId = 0;
+        IAType fieldType = null;
+
+        // iterate over the ClassAd attributes and serialize each one as a record field
+        Map<CaseInsensitiveString, ExprTree> attrs = pAd.getAttrList();
+        for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
+            // reset buffers
+            fieldNameBuffer.reset();
+            fieldValueBuffer.reset();
+            // take care of field name
+            String fldName = entry.getKey().get();
+            if (recType != null) {
+                fieldId = recBuilder.getFieldId(fldName);
+                if (fieldId < 0 && !recType.isOpen()) {
+                    throw new HyracksDataException("This record is closed, you can not add extra fields !!");
+                } else if (fieldId < 0 && recType.isOpen()) {
+                    aStringFieldName.setValue(fldName);
+                    if (aStringFieldName.getStringValue().contains("org.apache.asterix.external.classad.TokenValue")) {
+                        System.err.println("we have a problem");
+                    }
+                    stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+                    openRecordField = true;
+                    fieldType = null;
+                } else {
+                    // a closed field
+                    nulls.set(fieldId);
+                    fieldType = recType.getFieldTypes()[fieldId];
+                    openRecordField = false;
+                }
+            } else {
+                aStringFieldName.setValue(fldName);
+                stringSerde.serialize(aStringFieldName, fieldNameBuffer.getDataOutput());
+                openRecordField = true;
+                fieldType = null;
+            }
+
+            // add field value to value buffer
+            writeFieldValueToBuffer(fieldType, fieldValueBuffer.getDataOutput(), fldName, entry.getValue(), pAd);
+            if (openRecordField) {
+                if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+                    recBuilder.addField(fieldNameBuffer, fieldValueBuffer);
+                }
+            } else if (NonTaggedFormatUtil.isOptional(fieldType)) {
+                if (fieldValueBuffer.getByteArray()[0] != ATypeTag.NULL.serialize()) {
+                    recBuilder.addField(fieldId, fieldValueBuffer);
+                }
+            } else {
+                recBuilder.addField(fieldId, fieldValueBuffer);
+            }
+        }
+
+        if (recType != null) {
+            int nullableFieldId = checkNullConstraints(recType, nulls);
+            if (nullableFieldId != -1) {
+                throw new HyracksDataException(
+                        "Field: " + recType.getFieldNames()[nullableFieldId] + " can not be null");
+            }
+        }
+        recBuilder.write(out, true);
+    }
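+
+    /*
+     * Illustrative mapping (not exhaustive): a ClassAd such as
+     *   [ name = "pluto"; cpus = 4 ]
+     * is written by parseRecord as the ADM record
+     *   { "name": "pluto", "cpus": 4 }
+     * with each attribute value serialized through writeFieldValueToBuffer against
+     * the declared field type, or as an open field when no type is declared.
+     */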
+
+    // Serialize a single attribute value (or list item) into the given DataOutput.
+    private void writeFieldValueToBuffer(IAType fieldType, DataOutput out, String name, ExprTree tree, ClassAd pAd)
+            throws IOException, AsterixException {
+        Value val;
+        switch (tree.getKind()) {
+            case ATTRREF_NODE:
+            case CLASSAD_NODE:
+            case EXPR_ENVELOPE:
+            case EXPR_LIST_NODE:
+            case FN_CALL_NODE:
+            case OP_NODE:
+                val = valuePool.get();
+                if (!pAd.evaluateAttr(name, val)) {
+                    // evaluation failed: fall back to the stored string form of the expression
+                    val = ((Literal) pAd.getAttrList().get(name + "Expr")).getValue();
+                }
+                break;
+            case LITERAL_NODE:
+                val = ((Literal) tree).getValue();
+                break;
+            default:
+                throw new HyracksDataException("Unknown Expression type detected: " + tree.getKind());
+        }
+
+        switch (val.getValueType()) {
+            case ABSOLUTE_TIME_VALUE:
+                if (checkType(ATypeTag.DATETIME, fieldType)) {
+                    parseDateTime(val, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            case BOOLEAN_VALUE:
+                if (checkType(ATypeTag.BOOLEAN, fieldType)) {
+                    booleanSerde.serialize(val.getBoolVal() ? ABoolean.TRUE : ABoolean.FALSE, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            case CLASSAD_VALUE:
+                if (checkType(ATypeTag.RECORD, fieldType)) {
+                    IAType objectType = getComplexType(fieldType, ATypeTag.RECORD);
+                    ClassAd classad = val.getClassadVal();
+                    parseRecord((ARecordType) objectType, classad, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            case ERROR_VALUE:
+            case STRING_VALUE:
+            case UNDEFINED_VALUE:
+                if (checkType(ATypeTag.STRING, fieldType)) {
+                    parseString(val, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            case INTEGER_VALUE:
+                if (checkType(ATypeTag.INT64, fieldType)) {
+                    aInt64.setValue(val.getLongVal());
+                    int64Serde.serialize(aInt64, out);
+                } else if (checkType(ATypeTag.DOUBLE, fieldType)) {
+                    aDouble.setValue(val.getLongVal());
+                    doubleSerde.serialize(aDouble, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            case LIST_VALUE:
+            case SLIST_VALUE:
+                IAType objectType;
+                if (checkType(ATypeTag.UNORDEREDLIST, fieldType)) {
+                    objectType = getComplexType(fieldType, ATypeTag.UNORDEREDLIST);
+                    parseUnorderedList((AUnorderedListType) objectType, val, out);
+                } else if (checkType(ATypeTag.ORDEREDLIST, fieldType)) {
+                    objectType = getComplexType(fieldType, ATypeTag.ORDEREDLIST);
+                    parseOrderedList((AOrderedListType) objectType, val, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            case REAL_VALUE:
+                if (checkType(ATypeTag.DOUBLE, fieldType)) {
+                    aDouble.setValue(val.getDoubleVal());
+                    doubleSerde.serialize(aDouble, out);
+                } else if (checkType(ATypeTag.INT32, fieldType)) {
+                    aInt32.setValue((int) val.getDoubleVal());
+                    int32Serde.serialize(aInt32, out);
+                } else if (checkType(ATypeTag.INT64, fieldType)) {
+                    aInt64.setValue((long) val.getDoubleVal());
+                    int64Serde.serialize(aInt64, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            case RELATIVE_TIME_VALUE:
+                if (checkType(ATypeTag.DURATION, fieldType)) {
+                    parseDuration(val, out);
+                } else {
+                    throw new HyracksDataException(mismatchErrorMessage + fieldType.getTypeTag());
+                }
+                break;
+            default:
+                throw new HyracksDataException("unknown data type " + val.getValueType());
+        }
+    }
+
+    private void parseOrderedList(AOrderedListType oltype, Value listVal, DataOutput out)
+            throws IOException, AsterixException {
+        ArrayBackedValueStorage itemBuffer = getTempBuffer();
+        OrderedListBuilder orderedListBuilder = (OrderedListBuilder) getOrderedListBuilder();
+        IAType itemType = null;
+        if (oltype != null) {
+            itemType = oltype.getItemType();
+        }
+        orderedListBuilder.reset(oltype);
+        for (ExprTree tree : listVal.getListVal().getExprList()) {
+            itemBuffer.reset();
+            writeFieldValueToBuffer(itemType, itemBuffer.getDataOutput(), null, tree, null);
+            orderedListBuilder.addItem(itemBuffer);
+        }
+        orderedListBuilder.write(out, true);
+    }
+
+    private void parseUnorderedList(AUnorderedListType uoltype, Value listVal, DataOutput out)
+            throws IOException, AsterixException {
+        ArrayBackedValueStorage itemBuffer = getTempBuffer();
+        UnorderedListBuilder unorderedListBuilder = (UnorderedListBuilder) getUnorderedListBuilder();
+        IAType itemType = null;
+        if (uoltype != null) {
+            itemType = uoltype.getItemType();
+        }
+        unorderedListBuilder.reset(uoltype);
+        for (ExprTree tree : listVal.getListVal().getExprList()) {
+            itemBuffer.reset();
+            writeFieldValueToBuffer(itemType, itemBuffer.getDataOutput(), null, tree, null);
+            unorderedListBuilder.addItem(itemBuffer);
+        }
+        unorderedListBuilder.write(out, true);
+    }
+
+    private void parseString(Value val, DataOutput out) throws HyracksDataException {
+        switch (val.getValueType()) {
+            case ERROR_VALUE:
+                aString.setValue("error");
+                break;
+            case STRING_VALUE:
+                aString.setValue(val.getStringVal());
+                break;
+            case UNDEFINED_VALUE:
+                aString.setValue("undefined");
+                break;
+            default:
+                throw new HyracksDataException("Unknown String type " + val.getValueType());
+        }
+        stringSerde.serialize(aString, out);
+    }
+
+    protected void parseDuration(Value duration, DataOutput out) throws HyracksDataException {
+        try {
+            aDuration.setValue(0, duration.getTimeVal().getRelativeTime());
+            durationSerde.serialize(aDuration, out);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    protected void parseDateTime(Value datetime, DataOutput out) throws HyracksDataException {
+        aDateTime.setValue(datetime.getTimeVal().getTimeInMillis());
+        datetimeSerde.serialize(aDateTime, out);
+    }
+
+    public static IAType getComplexType(IAType aObjectType, ATypeTag tag) {
+        if (aObjectType == null) {
+            return null;
+        }
+
+        if (aObjectType.getTypeTag() == tag) {
+            return aObjectType;
+        }
+
+        if (aObjectType.getTypeTag() == ATypeTag.UNION) {
+            List<IAType> unionList = ((AUnionType) aObjectType).getUnionList();
+            for (int i = 0; i < unionList.size(); i++) {
+                if (unionList.get(i).getTypeTag() == tag) {
+                    return unionList.get(i);
+                }
+            }
+        }
+        return null; // won't get here for valid input
+    }
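+
+    /*
+     * Example (illustrative): for a field declared as UNION(NULL, ORDEREDLIST(INT64)),
+     * getComplexType(fieldType, ATypeTag.ORDEREDLIST) unwraps the union and returns
+     * the ORDEREDLIST(INT64) component, which parseOrderedList then uses to obtain
+     * the item type while building the list.
+     */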
+
+    private String mismatchErrorMessage = "Mismatched type: expected a value of type ";
+    private Map<String, String> configuration;
+
+    private boolean checkType(ATypeTag expectedTypeTag, IAType aObjectType) throws IOException {
+        return getTargetTypeTag(expectedTypeTag, aObjectType) != null;
+    }
+
+    private BitSet getBitSet() {
+        return bitSetPool.get();
+    }
+
+    public static int checkNullConstraints(ARecordType recType, BitSet nulls) {
+        for (int i = 0; i < recType.getFieldTypes().length; i++) {
+            if (!nulls.get(i)) {
+                IAType type = recType.getFieldTypes()[i];
+                if (type.getTypeTag() != ATypeTag.NULL && type.getTypeTag() != ATypeTag.UNION) {
+                    return i;
+                }
+
+                if (type.getTypeTag() == ATypeTag.UNION) { // union
+                    // reset per field so one nullable union doesn't mask later violations
+                    boolean isNull = false;
+                    List<IAType> unionList = ((AUnionType) type).getUnionList();
+                    for (int j = 0; j < unionList.size(); j++) {
+                        if (unionList.get(j).getTypeTag() == ATypeTag.NULL) {
+                            isNull = true;
+                            break;
+                        }
+                    }
+                    if (!isNull) {
+                        return i;
+                    }
+                }
+            }
+        }
+        return -1;
+    }
+
+    private IARecordBuilder getRecordBuilder() {
+        return recordBuilderPool.allocate(ATypeTag.RECORD);
+    }
+
+    private IAsterixListBuilder getOrderedListBuilder() {
+        return listBuilderPool.allocate(ATypeTag.ORDEREDLIST);
+    }
+
+    private IAsterixListBuilder getUnorderedListBuilder() {
+        return listBuilderPool.allocate(ATypeTag.UNORDEREDLIST);
+    }
+
+    private ArrayBackedValueStorage getTempBuffer() {
+        return (ArrayBackedValueStorage) abvsBuilderPool.allocate(ATypeTag.BINARY);
+    }
+
+    public static ATypeTag getMatchingType(Literal lit) throws HyracksDataException {
+        return getMatchingType(lit.getValue());
+    }
+
+    public static ATypeTag getMatchingType(Value val) throws HyracksDataException {
+        switch (val.getValueType()) {
+            case ABSOLUTE_TIME_VALUE:
+                return ATypeTag.DATETIME;
+            case BOOLEAN_VALUE:
+                return ATypeTag.BOOLEAN;
+            case CLASSAD_VALUE:
+                return ATypeTag.RECORD;
+            case ERROR_VALUE:
+            case STRING_VALUE:
+            case UNDEFINED_VALUE:
+                return ATypeTag.STRING;
+            case INTEGER_VALUE:
+                return ATypeTag.INT64;
+            case LIST_VALUE:
+            case SLIST_VALUE:
+                return ATypeTag.UNORDEREDLIST;
+            case NULL_VALUE:
+                return ATypeTag.NULL;
+            case REAL_VALUE:
+                return ATypeTag.DOUBLE;
+            case RELATIVE_TIME_VALUE:
+                return ATypeTag.DURATION;
+            default:
+                throw new HyracksDataException("Unknown data type");
+        }
+    }
+
+    /********************************
+     * End of AsterixDB specifics
+     ********************************/
+
+    /**
+     * Parse a ClassAd
+     *
+     * @param buffer
+     *            Buffer containing the string representation of the classad.
+     * @param full
+     *            If this parameter is true, the parse is considered to succeed
+     *            only if the ClassAd was parsed successfully and no other
+     *            tokens follow the ClassAd.
+     * @return the parsed ClassAd if successful, or null otherwise
+     * @throws IOException
+     */
+    public ClassAd parseClassAd(String buffer, boolean full) throws IOException {
+        currentSource = new StringLexerSource(buffer);
+        return parseClassAd(currentSource, full);
+    }
+
+    public ClassAd parseClassAd(String buffer, AMutableInt32 offset) throws IOException {
+        currentSource = new StringLexerSource(buffer);
+        ClassAd ad = parseClassAd((StringLexerSource) currentSource);
+        offset.setValue(((StringLexerSource) currentSource).getCurrentLocation());
+        return ad;
+    }
+
+    public ClassAd parseClassAd(StringLexerSource lexer_source) throws IOException {
+        return parseClassAd(lexer_source, false);
+    }
+
+    public ClassAd parseClassAd(File file, boolean full) throws IOException {
+        FileLexerSource fileLexerSource = new FileLexerSource(file);
+        return parseClassAd(fileLexerSource, full);
+    }
+
+    public ClassAd parseClassAd(InputStream in, boolean full) throws IOException {
+        InputStreamLexerSource lexer_source = new InputStreamLexerSource(in);
+        return parseClassAd(lexer_source, full);
+    }
+
+    // preferred method since the parser doesn't need to create an object
+    public void parseClassAd(ClassAd ad, LexerSource lexer_source, boolean full) throws IOException {
+        ad.reset();
+        if (lexer.initialize(lexer_source)) {
+            if (!parseClassAd(ad, full)) {
+                return;
+            } else if (lexer_source.readPreviousCharacter() != '\0') {
+                // The lexer swallows one extra character, so if we have
+                // two classads back to back we need to make sure to unread
+                // one of the characters.
+                lexer_source.unreadCharacter();
+            }
+        }
+    }
+
+    public ClassAd parseClassAd(LexerSource lexer_source, boolean full) throws IOException {
+        System.out.println("Don't use this call. instead, pass a mutable classad instance");
+        ClassAd ad = classAdPool.get();
+        if (lexer.initialize(lexer_source)) {
+            if (!parseClassAd(ad, full)) {
+                return null;
+            } else if (lexer_source.readPreviousCharacter() != '\0') {
+                // The lexer swallows one extra character, so if we have
+                // two classads back to back we need to make sure to unread
+                // one of the characters.
+                lexer_source.unreadCharacter();
+            }
+        }
+        return ad;
+    }
+
+    /**
+     * Parse a ClassAd
+     *
+     * @param buffer
+     *            Buffer containing the string representation of the classad.
+     * @param ad
+     *            The classad to be populated
+     * @param full
+     *            If this parameter is true, the parse is considered to succeed
+     *            only if the ClassAd was parsed successfully and no other
+     *            tokens follow the ClassAd.
+     * @return true on success, false on failure
+     * @throws IOException
+     */
+    public boolean parseClassAd(String buffer, ClassAd classad, boolean full) throws IOException {
+        StringLexerSource stringLexerSource = new StringLexerSource(buffer);
+        return parseClassAd(stringLexerSource, classad, full);
+    }
+
+    public boolean parseClassAd(String buffer, ClassAd classad, AMutableInt32 offset) throws IOException {
+        boolean success = false;
+        StringLexerSource stringLexerSource = new StringLexerSource(buffer, offset.getIntegerValue().intValue());
+        success = parseClassAd(stringLexerSource, classad);
+        offset.setValue(stringLexerSource.getCurrentLocation());
+        return success;
+    }
+
+    public boolean parseNext(ClassAd classad) throws IOException {
+        return parseClassAd(currentSource, classad, false);
+    }
+
+    public boolean parseNext(ClassAd classad, boolean full) throws IOException {
+        return parseClassAd(currentSource, classad, full);
+    }
+
+    private boolean parseClassAd(StringLexerSource lexer_source, ClassAd classad) throws IOException {
+        return parseClassAd(lexer_source, classad, false);
+    }
+
+    public boolean parseClassAd(File file, ClassAd classad, boolean full) throws IOException {
+        FileLexerSource fileLexerSource = new FileLexerSource(file);
+        return parseClassAd(fileLexerSource, classad, full);
+    }
+
+    public boolean parseClassAd(InputStream stream, ClassAd classad, boolean full) throws IOException {
+        InputStreamLexerSource inputStreamLexerSource = new InputStreamLexerSource(stream);
+        return parseClassAd(inputStreamLexerSource, classad, full);
+    }
+
+    public boolean parseClassAd(LexerSource lexer_source, ClassAd classad, boolean full) throws IOException {
+        boolean success = false;
+        if (lexer.initialize(lexer_source)) {
+            success = parseClassAd(classad, full);
+        }
+        if (success) {
+            // The lexer swallows one extra character, so if we have
+            // two classads back to back we need to make sure to unread
+            // one of the characters.
+            if (lexer_source.readPreviousCharacter() != Lexer.EOF) {
+                lexer_source.unreadCharacter();
+            }
+        } else {
+            classad.clear();
+        }
+        return success;
+    }
+
+    /**
+     * Parse an expression
+     *
+     * @param buffer
+     *            Buffer containing the string representation of the expression.
+     * @param full
+     *            If this parameter is true, the parse is considered to succeed
+     *            only if the expression was parsed successfully and no other
+     *            tokens are left.
+     * @return the parsed expression tree if successful, or null otherwise
+     */
+    public ExprTree parseExpression(String buffer, boolean full) throws IOException {
+        stringLexerSource.setNewSource(buffer);
+        ExprTreeHolder mutableExpr = mutableExprPool.get();
+        if (lexer.initialize(stringLexerSource)) {
+            parseExpression(mutableExpr, full);
+        }
+        return mutableExpr.getInnerTree();
+    }
+
+    public ExprTree ParseExpression(String buffer) throws IOException {
+        return parseExpression(buffer, false);
+    }
+
+    public ExprTree parseExpression(LexerSource lexer_source, boolean full) throws IOException {
+        ExprTreeHolder mutableExpr = mutableExprPool.get();
+        if (lexer.initialize(lexer_source)) {
+            parseExpression(mutableExpr, full);
+        }
+        return mutableExpr.getInnerTree();
+    }
+
+    public ExprTree parseNextExpression() throws IOException {
+        if (!lexer.wasInitialized()) {
+            return null;
+        } else {
+            ExprTreeHolder expr = mutableExprPool.get();
+            parseExpression(expr, false);
+            ExprTree innerTree = expr.getInnerTree();
+            return innerTree;
+        }
+    }
+
+    /*--------------------------------------------------------------------
+    *
+    * Private Functions
+    *
+    *-------------------------------------------------------------------*/
+
+    // Expression .= LogicalORExpression
+    // | LogicalORExpression '?' Expression ':' Expression
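+    //
+    // Worked example (illustrative): parsing  a + b * 2 > 5 ? "big" : "small"
+    // descends through the productions below, so '*' binds tighter than '+',
+    // '+' tighter than '>', and the comparison becomes the condition of a ternary
+    // Operation node whose branches are the two string literals.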
+
+    private boolean parseExpression(ExprTreeHolder tree) throws IOException {
+        return parseExpression(tree, false);
+    }
+
+    private boolean parseExpression(ExprTreeHolder tree, boolean full) throws IOException {
+        TokenType tt;
+        if (!parseLogicalORExpression(tree))
+            return false;
+        if ((tt = lexer.peekToken()) == TokenType.LEX_QMARK) {
+            lexer.consumeToken();
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeM = mutableExprPool.get();
+            ExprTreeHolder treeR = mutableExprPool.get();
+            parseExpression(treeM);
+            if ((tt = lexer.consumeToken()) != TokenType.LEX_COLON) {
+                throw new HyracksDataException("expected LEX_COLON, but got " + Lexer.strLexToken(tt));
+            }
+            parseExpression(treeR);
+            if (treeL.getInnerTree() != null && treeM.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(Operation.OpKind_TERNARY_OP, treeL, treeM, treeR, newTree);
+                tree.setInnerTree(newTree);
+                return (true);
+            }
+            tree.setInnerTree(null);
+            return false;
+        }
+        // if a full parse was requested, ensure that input is exhausted
+        if (full && (lexer.consumeToken() != TokenType.LEX_END_OF_INPUT)) {
+            throw new HyracksDataException(
+                    "expected LEX_END_OF_INPUT on full parse, but got " + String.valueOf(Lexer.strLexToken(tt)));
+        }
+        return true;
+    }
+
+    // LogicalORExpression .= LogicalANDExpression
+    // | LogicalORExpression '||' LogicalANDExpression
+
+    private boolean parseLogicalORExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseLogicalANDExpression(tree))
+            return false;
+        while ((lexer.peekToken()) == TokenType.LEX_LOGICAL_OR) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            lexer.consumeToken();
+            parseLogicalANDExpression(treeR);
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(Operation.OpKind_LOGICAL_OR_OP, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+        }
+        return true;
+    }
+
+    // LogicalANDExpression .= InclusiveORExpression
+    // | LogicalANDExpression '&&' InclusiveORExpression
+    private boolean parseLogicalANDExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseInclusiveORExpression(tree))
+            return false;
+        while ((lexer.peekToken()) == TokenType.LEX_LOGICAL_AND) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            lexer.consumeToken();
+            parseInclusiveORExpression(treeR);
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(Operation.OpKind_LOGICAL_AND_OP, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+        }
+        return true;
+    }
+
+    // InclusiveORExpression .= ExclusiveORExpression
+    // | InclusiveORExpression '|' ExclusiveORExpression
+    public boolean parseInclusiveORExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseExclusiveORExpression(tree))
+            return false;
+        while ((lexer.peekToken()) == TokenType.LEX_BITWISE_OR) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            lexer.consumeToken();
+            parseExclusiveORExpression(treeR);
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(Operation.OpKind_BITWISE_OR_OP, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+        }
+        return true;
+    }
+
+    // ExclusiveORExpression .= ANDExpression
+    // | ExclusiveORExpression '^' ANDExpression
+    private boolean parseExclusiveORExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseANDExpression(tree))
+            return false;
+        while ((lexer.peekToken()) == TokenType.LEX_BITWISE_XOR) {
+            lexer.consumeToken();
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            parseANDExpression(treeR);
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(Operation.OpKind_BITWISE_XOR_OP, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+        }
+        return true;
+    }
+
+    // ANDExpression .= EqualityExpression
+    // | ANDExpression '&' EqualityExpression
+    private boolean parseANDExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseEqualityExpression(tree))
+            return false;
+        while ((lexer.peekToken()) == TokenType.LEX_BITWISE_AND) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            lexer.consumeToken();
+            parseEqualityExpression(treeR);
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(Operation.OpKind_BITWISE_AND_OP, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+        }
+        return true;
+    }
+
+    // EqualityExpression .= RelationalExpression
+    // | EqualityExpression '==' RelationalExpression
+    // | EqualityExpression '!=' RelationalExpression
+    // | EqualityExpression '=?=' RelationalExpression
+    // | EqualityExpression '=!=' RelationalExpression
+    private boolean parseEqualityExpression(ExprTreeHolder tree) throws IOException {
+        TokenType tt;
+        int op = Operation.OpKind_NO_OP;
+        if (!parseRelationalExpression(tree))
+            return false;
+        tt = lexer.peekToken();
+        while (tt == TokenType.LEX_EQUAL || tt == TokenType.LEX_NOT_EQUAL || tt == TokenType.LEX_META_EQUAL
+                || tt == TokenType.LEX_META_NOT_EQUAL) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            lexer.consumeToken();
+            parseRelationalExpression(treeR);
+            switch (tt) {
+                case LEX_EQUAL:
+                    op = Operation.OpKind_EQUAL_OP;
+                    break;
+                case LEX_NOT_EQUAL:
+                    op = Operation.OpKind_NOT_EQUAL_OP;
+                    break;
+                case LEX_META_EQUAL:
+                    op = Operation.OpKind_META_EQUAL_OP;
+                    break;
+                case LEX_META_NOT_EQUAL:
+                    op = Operation.OpKind_META_NOT_EQUAL_OP;
+                    break;
+                default:
+                    throw new HyracksDataException("ClassAd:  Should not reach here");
+            }
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(op, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+            tt = lexer.peekToken();
+        }
+        return true;
+    }
+
+    // RelationalExpression .= ShiftExpression
+    // | RelationalExpression '<' ShiftExpression
+    // | RelationalExpression '>' ShiftExpression
+    // | RelationalExpression '<=' ShiftExpression
+    // | RelationalExpression '>=' ShiftExpression
+    private boolean parseRelationalExpression(ExprTreeHolder tree) throws IOException {
+        TokenType tt;
+        if (!parseShiftExpression(tree))
+            return false;
+        tt = lexer.peekToken();
+        while (tt == TokenType.LEX_LESS_THAN || tt == TokenType.LEX_GREATER_THAN || tt == TokenType.LEX_LESS_OR_EQUAL
+                || tt == TokenType.LEX_GREATER_OR_EQUAL) {
+            int op = Operation.OpKind_NO_OP;
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            lexer.consumeToken();
+            parseShiftExpression(treeR);
+            switch (tt) {
+                case LEX_LESS_THAN:
+                    op = Operation.OpKind_LESS_THAN_OP;
+                    break;
+                case LEX_LESS_OR_EQUAL:
+                    op = Operation.OpKind_LESS_OR_EQUAL_OP;
+                    break;
+                case LEX_GREATER_THAN:
+                    op = Operation.OpKind_GREATER_THAN_OP;
+                    break;
+                case LEX_GREATER_OR_EQUAL:
+                    op = Operation.OpKind_GREATER_OR_EQUAL_OP;
+                    break;
+                default:
+                    throw new HyracksDataException("ClassAd:  Should not reach here");
+            }
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(op, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+            tt = lexer.peekToken();
+        }
+        return true;
+    }
+
+    // ShiftExpression .= AdditiveExpression
+    // | ShiftExpression '<<' AdditiveExpression
+    // | ShiftExpression '>>' AdditiveExpression
+    // | ShiftExpression '>>>' AdditiveExpression
+    private boolean parseShiftExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseAdditiveExpression(tree))
+            return false;
+
+        TokenType tt = lexer.peekToken();
+        while (tt == TokenType.LEX_LEFT_SHIFT || tt == TokenType.LEX_RIGHT_SHIFT || tt == TokenType.LEX_URIGHT_SHIFT) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            int op;
+            lexer.consumeToken();
+            parseAdditiveExpression(treeR);
+            switch (tt) {
+                case LEX_LEFT_SHIFT:
+                    op = Operation.OpKind_LEFT_SHIFT_OP;
+                    break;
+                case LEX_RIGHT_SHIFT:
+                    op = Operation.OpKind_RIGHT_SHIFT_OP;
+                    break;
+                case LEX_URIGHT_SHIFT:
+                    op = Operation.OpKind_URIGHT_SHIFT_OP;
+                    break;
+                default:
+                    op = Operation.OpKind_NO_OP; // keep definite assignment happy (unreachable)
+                    throw new HyracksDataException("ClassAd:  Should not reach here");
+            }
+
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(op, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+            tt = lexer.peekToken();
+        }
+        return true;
+    }
+
+    // AdditiveExpression .= MultiplicativeExpression
+    // | AdditiveExpression '+' MultiplicativeExpression
+    // | AdditiveExpression '-' MultiplicativeExpression
+    private boolean parseAdditiveExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseMultiplicativeExpression(tree))
+            return false;
+
+        TokenType tt = lexer.peekToken();
+        while (tt == TokenType.LEX_PLUS || tt == TokenType.LEX_MINUS) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            lexer.consumeToken();
+            parseMultiplicativeExpression(treeR);
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(
+                        (tt == TokenType.LEX_PLUS) ? Operation.OpKind_ADDITION_OP : Operation.OpKind_SUBTRACTION_OP,
+                        treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+            tt = lexer.peekToken();
+        }
+        return true;
+    }
+
+    // MultiplicativeExpression .= UnaryExpression
+    // | MultiplicativeExpression '*' UnaryExpression
+    // | MultiplicativeExpression '/' UnaryExpression
+    // | MultiplicativeExpression '%' UnaryExpression
+    private boolean parseMultiplicativeExpression(ExprTreeHolder tree) throws IOException {
+        if (!parseUnaryExpression(tree))
+            return false;
+
+        TokenType tt = lexer.peekToken();
+        while (tt == TokenType.LEX_MULTIPLY || tt == TokenType.LEX_DIVIDE || tt == TokenType.LEX_MODULUS) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            int op;
+            lexer.consumeToken();
+            parseUnaryExpression(treeR);
+            switch (tt) {
+                case LEX_MULTIPLY:
+                    op = Operation.OpKind_MULTIPLICATION_OP;
+                    break;
+                case LEX_DIVIDE:
+                    op = Operation.OpKind_DIVISION_OP;
+                    break;
+                case LEX_MODULUS:
+                    op = Operation.OpKind_MODULUS_OP;
+                    break;
+                default:
+                    op = Operation.OpKind_NO_OP; // keep definite assignment happy (unreachable)
+                    throw new HyracksDataException("ClassAd:  Should not reach here");
+            }
+            if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(op, treeL, treeR, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+            tt = lexer.peekToken();
+        }
+        return true;
+    }
+
+    // UnaryExpression .= PostfixExpression
+    // | UnaryOperator UnaryExpression
+    // ( where UnaryOperator is one of { -, +, ~, ! } )
+    private boolean parseUnaryExpression(ExprTreeHolder tree) throws IOException {
+        TokenType tt = lexer.peekToken();
+        if (tt == TokenType.LEX_MINUS || tt == TokenType.LEX_PLUS || tt == TokenType.LEX_BITWISE_NOT
+                || tt == TokenType.LEX_LOGICAL_NOT) {
+            lexer.consumeToken();
+            ExprTreeHolder treeM = mutableExprPool.get();
+            int op = Operation.OpKind_NO_OP;
+            parseUnaryExpression(treeM);
+            switch (tt) {
+                case LEX_MINUS:
+                    op = Operation.OpKind_UNARY_MINUS_OP;
+                    break;
+                case LEX_PLUS:
+                    op = Operation.OpKind_UNARY_PLUS_OP;
+                    break;
+                case LEX_BITWISE_NOT:
+                    op = Operation.OpKind_BITWISE_NOT_OP;
+                    break;
+                case LEX_LOGICAL_NOT:
+                    op = Operation.OpKind_LOGICAL_NOT_OP;
+                    break;
+                default:
+                    throw new HyracksDataException("ClassAd: Shouldn't Get here");
+            }
+            if (treeM.getInnerTree() != null) {
+                Operation newTree = operationPool.get();
+                Operation.createOperation(op, treeM, null, null, newTree);
+                tree.setInnerTree(newTree);
+            } else {
+                tree.setInnerTree(null);
+                return (false);
+            }
+            return true;
+        } else {
+            return parsePostfixExpression(tree);
+        }
+    }
+
+    // PostfixExpression .= PrimaryExpression
+    // | PostfixExpression '.' Identifier
+    // | PostfixExpression '[' Expression ']'
+    private boolean parsePostfixExpression(ExprTreeHolder tree) throws IOException {
+        TokenType tt;
+        if (!parsePrimaryExpression(tree))
+            return false;
+        while ((tt = lexer.peekToken()) == TokenType.LEX_OPEN_BOX || tt == TokenType.LEX_SELECTION) {
+            ExprTreeHolder treeL = tree;
+            ExprTreeHolder treeR = mutableExprPool.get();
+            TokenValue tv = tokenValuePool.get();
+            lexer.consumeToken();
+            if (tt == TokenType.LEX_OPEN_BOX) {
+                // subscript operation
+                parseExpression(treeR);
+                if (treeL.getInnerTree() != null && treeR.getInnerTree() != null) {
+                    Operation newTree = operationPool.get();
+                    Operation.createOperation(Operation.OpKind_SUBSCRIPT_OP, treeL, treeR, null, newTree);
+                    if (lexer.consumeToken() == TokenType.LEX_CLOSE_BOX) {
+                        tree.setInnerTree(newTree);
+                        continue;
+                    }
+                }
+                tree.setInnerTree(null);
+                return false;
+            } else if (tt == TokenType.LEX_SELECTION) {
+                // field selection operation
+                if ((tt = lexer.consumeToken(tv)) != TokenType.LEX_IDENTIFIER) {
+                    throw new HyracksDataException("second argument of selector must be an " + "identifier (got"
+                            + String.valueOf(Lexer.strLexToken(tt)) + ")");
+                }
+                AttributeReference newTree = attrRefPool.get();
+                AttributeReference.createAttributeReference(treeL, tv.getStrValue(), false, newTree);
+                tree.setInnerTree(newTree);
+            }
+        }
+        return true;
+    }
+
+    // PrimaryExpression .= Identifier
+    // | FunctionCall
+    // | '.' Identifier
+    // | '(' Expression ')'
+    // | Literal
+    // FunctionCall .= Identifier ArgumentList
+    // ( Constant may be
+    // boolean,undefined,error,string,integer,real,classad,list )
+    // ( ArgumentList non-terminal includes parentheses )
+    private boolean parsePrimaryExpression(ExprTreeHolder tree) throws IOException {
+        ExprTreeHolder treeL;
+        TokenValue tv = tokenValuePool.get();
+        TokenType tt;
+        tree.setInnerTree(null);
+        switch ((tt = lexer.peekToken(tv))) {
+            // identifiers
+            case LEX_IDENTIFIER:
+                isExpr = true;
+                lexer.consumeToken();
+                // check for function call
+                if ((tt = lexer.peekToken()) == TokenType.LEX_OPEN_PAREN) {
+                    ExprList argList = exprListPool.get();
+                    if (!parseArgumentList(argList)) {
+                        tree.setInnerTree(null);
+                        return false;
+                    }
+                    // special case function-calls should be converted
+                    // into a literal expression if the argument is a
+                    // string literal
+                    if (shouldEvaluateAtParseTime(tv.getStrValue().toString(), argList)) {
+                        tree.setInnerTree(evaluateFunction(tv.getStrValue().toString(), argList));
+                    } else {
+                        tree.setInnerTree(FunctionCall.createFunctionCall(tv.getStrValue().toString(), argList));
+                    }
+                } else {
+                    // I don't think this is ever hit
+                    tree.setInnerTree(AttributeReference.createAttributeReference(null, tv.getStrValue(), false));
+                }
+                return (tree.getInnerTree() != null);
+            case LEX_SELECTION:
+                isExpr = true;
+                lexer.consumeToken();
+                if ((tt = lexer.consumeToken(tv)) == TokenType.LEX_IDENTIFIER) {
+                    // the boolean final arg signifies that reference is absolute
+                    tree.setInnerTree(AttributeReference.createAttributeReference(null, tv.getStrValue(), true));
+                    return (tree.size() != 0);
+                }
+                // not an identifier following the '.'
+                throw new HyracksDataException(
+                        "need identifier in selection expression (got" + Lexer.strLexToken(tt) + ")");
+                // parenthesized expression
+            case LEX_OPEN_PAREN: {
+                isExpr = true;
+                lexer.consumeToken();
+                treeL = mutableExprPool.get();
+                parseExpression(treeL);
+                if (treeL.getInnerTree() == null) {
+                    tree.resetExprTree(null);
+                    return false;
+                }
+
+                if ((tt = lexer.consumeToken()) != TokenType.LEX_CLOSE_PAREN) {
+                    throw new HyracksDataException("exptected LEX_CLOSE_PAREN, but got " + Lexer.strLexToken(tt));
+                    // tree.resetExprTree(null);
+                    // return false;
+                }
+                // assume make operation will return a new tree
+                tree.setInnerTree(Operation.createOperation(Operation.OpKind_PARENTHESES_OP, treeL));
+                return (tree.size() != 0);
+            }
+                // constants
+            case LEX_OPEN_BOX: {
+                isExpr = true;
+                ClassAd newAd = classAdPool.get();
+                if (!parseClassAd(newAd)) {
+                    tree.resetExprTree(null);
+                    return false;
+                }
+                tree.setInnerTree(newAd);
+            }
+                return true;
+
+            case LEX_OPEN_BRACE: {
+                isExpr = true;
+                ExprList newList = exprListPool.get();
+                if (!parseExprList(newList)) {
+                    tree.setInnerTree(null);
+                    return false;
+                }
+                tree.setInnerTree(newList);
+            }
+                return true;
+
+            case LEX_UNDEFINED_VALUE: {
+                Value val = valuePool.get();
+                lexer.consumeToken();
+                val.setUndefinedValue();
+                tree.setInnerTree(Literal.createLiteral(val));
+                return (tree.getInnerTree() != null);
+            }
+            case LEX_ERROR_VALUE: {
+                Value val = valuePool.get();
+                lexer.consumeToken();
+                val.setErrorValue();
+                tree.setInnerTree(Literal.createLiteral(val));
+                return (tree.getInnerTree() != null);
+            }
+            case LEX_BOOLEAN_VALUE: {
+                Value val = valuePool.get();
+                MutableBoolean b = new MutableBoolean();
+                tv.getBoolValue(b);
+                lexer.consumeToken();
+                val.setBooleanValue(b);
+                tree.setInnerTree(Literal.createLiteral(val));
+                return (tree.getInnerTree() != null);
+            }
+
+            case LEX_INTEGER_VALUE: {
+                Value val = valuePool.get();
+                lexer.consumeToken();
+                val.setIntegerValue(tv.getIntValue());
+                tree.setInnerTree(Literal.createLiteral(val, tv.getFactor()));
+                return (tree.getInnerTree() != null);
+            }
+
+            case LEX_REAL_VALUE: {
+                Value val = valuePool.get();
+                lexer.consumeToken();
+                val.setRealValue(tv.getRealValue());
+                tree.setInnerTree(Literal.createLiteral(val, tv.getFactor()));
+                return (tree.getInnerTree() != null);
+            }
+
+            case LEX_STRING_VALUE: {
+                Value val = valuePool.get();
+                lexer.consumeToken();
+                val.setStringValue(tv.getStrValue());
+                tree.setInnerTree(Literal.createLiteral(val));
+                return (tree.getInnerTree() != null);
+            }
+
+            case LEX_ABSOLUTE_TIME_VALUE: {
+                Value val = valuePool.get();
+                lexer.consumeToken();
+                val.setAbsoluteTimeValue(tv.getTimeValue());
+                tree.setInnerTree(Literal.createLiteral(val));
+                return (tree.getInnerTree() != null);
+            }
+
+            case LEX_RELATIVE_TIME_VALUE: {
+                Value val = valuePool.get();
+                lexer.consumeToken();
+                val.setRelativeTimeValue(tv.getTimeValue().getRelativeTime());
+                tree.setInnerTree(Literal.createLiteral(val));
+                return (tree.getInnerTree() != null);
+            }
+
+            default:
+                tree.setInnerTree(null);
+                return false;
+        }
+    }
+
+    // ArgumentList .= '(' ListOfArguments ')'
+    // ListOfArguments .= (epsilon)
+    // | ListOfArguments ',' Expression
+    public boolean parseArgumentList(ExprList argList) throws IOException {
+        TokenType tt;
+        argList.clear();
+        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_PAREN) {
+            throw new HyracksDataException("expected LEX_OPEN_PAREN but got " + String.valueOf(Lexer.strLexToken(tt)));
+            // return false;
+        }
+        tt = lexer.peekToken();
+        ExprTreeHolder tree = mutableExprPool.get();
+        while (tt != TokenType.LEX_CLOSE_PAREN) {
+            // parse the expression
+            tree.reset();
+            parseExpression(tree);
+            if (tree.getInnerTree() == null) {
+                argList.clear();
+                return false;
+            }
+            // insert the expression into the argument list
+            argList.add(tree.getInnerTree());
+            // the next token must be a ',' or a ')'
+            // or it can be a ';' when old ClassAd semantics are in use (currently
+            // disabled, hence the '&& false' below)
+            tt = lexer.peekToken();
+            if (tt == TokenType.LEX_COMMA || (tt == TokenType.LEX_SEMICOLON && false))
+                lexer.consumeToken();
+            else if (tt != TokenType.LEX_CLOSE_PAREN) {
+                argList.clear();
+                throw new HyracksDataException(
+                        "expected LEX_COMMA or LEX_CLOSE_PAREN but got " + String.valueOf(Lexer.strLexToken(tt)));
+                // return false;
+            }
+        }
+        lexer.consumeToken();
+        return true;
+    }
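+
+    /*
+     * Example (illustrative): for input like  strcat("foo", "bar")  the identifier is
+     * consumed in parsePrimaryExpression, and parseArgumentList then collects the two
+     * string-literal arguments between the parentheses into an ExprList, which becomes
+     * either a FunctionCall node or, if the call can be folded at parse time, a Literal.
+     */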
+
+    // ClassAd .= '[' AttributeList ']'
+    // AttributeList .= (epsilon)
+    // | Attribute ';' AttributeList
+    // Attribute .= Identifier '=' Expression
+    public boolean parseClassAd(ClassAd ad) throws IOException {
+        return parseClassAd(ad, false);
+    }
+
+    public boolean parseClassAdOld(ClassAd ad, boolean full) throws IOException {
+        return false;
+    }
+
+    public boolean parseClassAd(ClassAd ad, boolean full) throws IOException {
+        TokenType tt;
+        ad.clear();
+        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_BOX)
+            return false;
+        tt = lexer.peekToken();
+        TokenValue tv = tokenValuePool.get();
+        ExprTreeHolder tree = mutableExprPool.get();
+        while (tt != TokenType.LEX_CLOSE_BOX) {
+            // Get the name of the expression
+            tv.reset();
+            tree.reset();
+            tt = lexer.consumeToken(tv);
+            if (tt == TokenType.LEX_SEMICOLON) {
+                // We allow empty expressions, so a stray double semicolon does no harm.
+                // Technically it's not right, but we shouldn't make users pay the price
+                // for a meaningless mistake. See condor-support #1881 for a user that
+                // was bitten by this.
+                continue;
+            }
+            if (tt != TokenType.LEX_IDENTIFIER) {
+                throw new HyracksDataException(
+                        "while parsing classad:  expected LEX_IDENTIFIER but got " + Lexer.strLexToken(tt));
+            }
+
+            // consume the intermediate '='
+            if ((tt = lexer.consumeToken()) != TokenType.LEX_BOUND_TO) {
+                throw new HyracksDataException(
+                        "while parsing classad:  expected LEX_BOUND_TO but got " + Lexer.strLexToken(tt));
+            }
+
+            isExpr = false;
+            // parse the expression
+            parseExpression(tree);
+            if (tree.getInnerTree() == null) {
+                throw new HyracksDataException("parse expression returned empty tree");
+            }
+
+            // insert the attribute into the classad
+            if (!ad.insert(tv.getStrValue().toString(), tree)) {
+                throw new HyracksDataException("Couldn't insert value to classad");
+            }
+
+            // the next token must be a ';' or a ']'
+            tt = lexer.peekToken();
+            if (tt != TokenType.LEX_SEMICOLON && tt != TokenType.LEX_CLOSE_BOX) {
+                throw new HyracksDataException("while parsing classad:  expected LEX_SEMICOLON or "
+                        + "LEX_CLOSE_BOX but got " + Lexer.strLexToken(tt));
+            }
+
+            // Slurp up any extra semicolons. This does not duplicate the work at the top
+            // of the loop: this loop handles extra semicolons after the last expression,
+            // while the check at the top handles optional leading semicolons.
+            while (tt == TokenType.LEX_SEMICOLON) {
+                lexer.consumeToken();
+                tt = lexer.peekToken();
+            }
+        }
+        lexer.consumeToken();
+        // if a full parse was requested, ensure that input is exhausted
+        if (full && (tt = lexer.consumeToken()) != TokenType.LEX_END_OF_INPUT) {
+            throw new HyracksDataException("while parsing classad:  expected LEX_END_OF_INPUT for "
+                    + "full parse but got " + Lexer.strLexToken(tt));
+        }
+        return true;
+    }
+
+    // ExprList .= '{' ListOfExpressions '}'
+    // ListOfExpressions .= (epsilon)
+    // | Expression ',' ListOfExpressions
+    public boolean parseExprList(ExprList list) throws IOException {
+        return parseExprList(list, false);
+    }
+
+    public boolean parseExprList(ExprList list, boolean full) throws IOException {
+        TokenType tt;
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprList loe = new ExprList();
+
+        if ((tt = lexer.consumeToken()) != TokenType.LEX_OPEN_BRACE) {
+            throw new HyracksDataException(
+                    "while parsing expression list:  expected LEX_OPEN_BRACE" + " but got " + Lexer.strLexToken(tt));
+            // return false;
+        }
+        tt = lexer.peekToken();
+        while (tt != TokenType.LEX_CLOSE_BRACE) {
+            // parse the expression
+            parseExpression(tree);
+            if (tree.getInnerTree() == null) {
+                throw new HyracksDataException(
+                        "while parsing expression list:  expected an expression but got " + Lexer.strLexToken(tt));
+            }
+
+            // insert the expression into the list
+            loe.add(tree);
+
+            // the next token must be a ',' or a '}'
+            tt = lexer.peekToken();
+            if (tt == TokenType.LEX_COMMA)
+                lexer.consumeToken();
+            else if (tt != TokenType.LEX_CLOSE_BRACE) {
+                throw new HyracksDataException("while parsing expression list:  expected "
+                        + "LEX_CLOSE_BRACE or LEX_COMMA but got " + Lexer.strLexToken(tt));
+            }
+        }
+
+        lexer.consumeToken();
+        list.setValue(ExprList.createExprList(loe));
+
+        // if a full parse was requested, ensure that input is exhausted
+        if (full && (tt = lexer.consumeToken()) != TokenType.LEX_END_OF_INPUT) {
+            list.clear();
+            throw new HyracksDataException("while parsing expression list:  expected "
+                    + "LEX_END_OF_INPUT for full parse but got " + Lexer.strLexToken(tt));
+        }
+        return true;
+    }
+
+    public boolean shouldEvaluateAtParseTime(String functionName, ExprList argList) throws HyracksDataException {
+        boolean should_eval = false;
+        if (functionName.equalsIgnoreCase("absTime") || functionName.equalsIgnoreCase("relTime")) {
+            if (argList.size() == 1 && argList.get(0).getKind() == NodeKind.LITERAL_NODE) {
+                Value val = new Value();
+                AMutableNumberFactor factor = new AMutableNumberFactor();
+                ((Literal) argList.get(0)).getComponents(val, factor);
+                if (val.isStringValue()) {
+                    should_eval = true;
+                }
+            }
+        }
+        return should_eval;
+    }
+
+    public ExprTree evaluateFunction(String functionName, ExprList argList) throws HyracksDataException {
+        Value val = new Value();
+        AMutableNumberFactor factor = new AMutableNumberFactor();
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ((Literal) argList.get(0)).getComponents(val, factor);
+
+        AMutableCharArrayString string_value = new AMutableCharArrayString();
+        if (val.isStringValue(string_value)) {
+            if (functionName.equalsIgnoreCase("absTime")) {
+                tree.setInnerTree(Literal.createAbsTime(string_value));
+            } else if (functionName.equalsIgnoreCase("relTime")) {
+                tree.setInnerTree(Literal.createRelTime(string_value));
+            } else {
+                tree.setInnerTree(FunctionCall.createFunctionCall(functionName, argList));
+            }
+        } else {
+            tree.setInnerTree(FunctionCall.createFunctionCall(functionName, argList));
+        }
+        return tree;
+    }
+
+    public TokenType peekToken() throws IOException {
+        if (lexer.wasInitialized()) {
+            return lexer.peekToken();
+        } else {
+            return TokenType.LEX_TOKEN_ERROR;
+        }
+    }
+
+    public TokenType consumeToken() throws IOException {
+        if (lexer.wasInitialized()) {
+            return lexer.consumeToken();
+        } else {
+            return TokenType.LEX_TOKEN_ERROR;
+        }
+    }
+
+    public boolean parseExpression(String buf, ExprTreeHolder tree) throws IOException {
+        return parseExpression(buf, tree, false);
+    }
+
+    public boolean parseExpression(String buf, ExprTreeHolder tree, boolean full) throws IOException {
+        boolean success;
+        StringLexerSource lexer_source = new StringLexerSource(buf);
+
+        success = false;
+        if (lexer.initialize(lexer_source)) {
+            success = parseExpression(tree, full);
+        }
+        return success;
+    }
+
+    public ClassAd parseClassAd(String input_basic) throws IOException {
+        return parseClassAd(input_basic, false);
+    }
+
+    public LexerSource getLexerSource() {
+        return currentSource;
+    }
+
+    public void setLexerSource(LexerSource lexerSource) {
+        this.currentSource = lexerSource;
+    }
+
+    public void reset() {
+        resetPools();
+    }
+
+    public Literal getLiteral() {
+        return literalPool.get();
+    }
+
+    @Override
+    public DataSourceType getDataSourceType() {
+        return ExternalDataUtils.isDataSourceStreamProvider(configuration) ? DataSourceType.STREAM
+                : DataSourceType.RECORDS;
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration, ARecordType recordType) throws IOException {
+        this.recordType = recordType;
+        this.configuration = configuration;
+        String parserConfig = configuration.get(ClassAdParserFactory.KEY_OLD_FORMAT);
+        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.TRUE)) {
+            oldFormat = true;
+            rootAd.createParser();
+        }
+        parserConfig = configuration.get(ExternalDataConstants.KEY_READER);
+        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.READER_LINE_SEPARATED)) {
+            oldFormat = true;
+            rootAd.createParser();
+        }
+
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EVALUATE);
+        if (parserConfig != null && parserConfig.equalsIgnoreCase("false")) {
+            evaluateExpr = false;
+            keepBoth = false;
+        }
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_KEEP_EXPR);
+        if (parserConfig != null && parserConfig.equalsIgnoreCase("false")) {
+            keepBoth = false;
+            evaluateExpr = true;
+        }
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_PREFIX);
+        if (parserConfig != null && parserConfig.trim().length() > 0) {
+            exprPrefix = parserConfig;
+        }
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_SUFFIX);
+        if (parserConfig != null && parserConfig.trim().length() > 0) {
+            exprSuffix = parserConfig;
+        }
+        parserConfig = configuration.get(ClassAdParserFactory.KEY_EXPR_NAME_SUFFIX);
+        if (parserConfig != null && parserConfig.trim().length() > 0) {
+            exprFieldNameSuffix = parserConfig;
+        }
+        if (!oldFormat) {
+            configuration.put(ExternalDataConstants.KEY_RECORD_START, "[");
+            configuration.put(ExternalDataConstants.KEY_RECORD_END, "]");
+        }
+    }
+
+    @Override
+    public void parse(IRawRecord<? extends char[]> record, DataOutput out) throws IOException {
+        try {
+            if (oldFormat) {
+                int maxOffset = record.size();
+                rootAd.clear();
+                char[] buffer = record.get();
+                aInt32.setValue(0);
+                String line = readLine(buffer, aInt32, maxOffset);
+                while (line != null) {
+                    if (line.trim().length() == 0) {
+                        if (rootAd.size() == 0) {
+                            line = readLine(buffer, aInt32, maxOffset);
+                            continue;
+                        }
+                        break;
+                    } else if (!rootAd.insert(line)) {
+                        throw new HyracksDataException("Couldn't parse expression in line: " + line);
+                    }
+                    line = readLine(buffer, aInt32, maxOffset);
+                }
+            } else {
+                resetPools();
+                currentSource.setNewSource(record.get());
+                rootAd.reset();
+                asterixParseClassAd(rootAd);
+            }
+            parseRecord(recordType, rootAd, out);
+        } catch (Exception e) {
+            throw new HyracksDataException(e);
+        }
+    }
+
+    @Override
+    public Class<? extends char[]> getRecordClass() {
+        return char[].class;
+    }
+}
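
For orientation, a minimal sketch of how this parser might be driven, assuming the single-argument
ClassAdParser(ARecordType) constructor and the parseClassAd(String) overload that appear in this diff;
the record type and the attribute names in the sample ClassAd are illustrative only:

    // Hedged usage sketch; exception handling and record-type setup are omitted.
    ClassAdParser parser = new ClassAdParser(recordType);   // recordType: an ARecordType for the target dataset
    // One ClassAd record: '[' Identifier '=' Expression ';' ... ']'
    ClassAd ad = parser.parseClassAd("[ JobId = 42; Owner = \"condor\"; Args = { 1, 2, 3 } ]");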

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java
new file mode 100644
index 0000000..97982df
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/ClassAdParserFactory.java
@@ -0,0 +1,100 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.library;
+
+import java.io.IOException;
+import java.util.Map;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IExternalDataSourceFactory.DataSourceType;
+import org.apache.asterix.external.api.IRecordDataParser;
+import org.apache.asterix.external.api.IRecordDataParserFactory;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ClassAdParserFactory implements IRecordDataParserFactory<char[]> {
+
+    private static final long serialVersionUID = 1L;
+    public static final String KEY_OLD_FORMAT = "old-format";
+    public static final String KEY_EVALUATE = "evaluate";
+    public static final String KEY_KEEP_EXPR = "keep-expr";
+    public static final String KEY_EXPR_PREFIX = "expr-prefix";
+    public static final String KEY_EXPR_SUFFIX = "expr-suffix";
+    public static final String KEY_EXPR_NAME_SUFFIX = "expr-name-suffix";
+
+    private ARecordType recordType;
+    private Map<String, String> configuration;
+    private boolean oldFormat = false;
+
+    private void writeObject(java.io.ObjectOutputStream stream) throws IOException {
+        stream.writeObject(recordType);
+        stream.writeObject(configuration);
+    }
+
+    @SuppressWarnings("unchecked")
+    private void readObject(java.io.ObjectInputStream stream) throws IOException, ClassNotFoundException {
+        recordType = (ARecordType) stream.readObject();
+        configuration = (Map<String, String>) stream.readObject();
+    }
+
+    @Override
+    public DataSourceType getDataSourceType() throws AsterixException {
+        return DataSourceType.RECORDS;
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) throws Exception {
+        this.configuration = configuration;
+        // is old format?
+        String parserConfig = configuration.get(KEY_OLD_FORMAT);
+        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.TRUE)) {
+            oldFormat = true;
+        }
+        parserConfig = configuration.get(ExternalDataConstants.KEY_READER);
+        if (parserConfig != null && parserConfig.equalsIgnoreCase(ExternalDataConstants.READER_LINE_SEPARATED)) {
+            oldFormat = true;
+        }
+        if (!oldFormat) {
+            configuration.put(ExternalDataConstants.KEY_RECORD_START, "[");
+            configuration.put(ExternalDataConstants.KEY_RECORD_END, "]");
+        }
+
+    }
+
+    @Override
+    public void setRecordType(ARecordType recordType) {
+        this.recordType = recordType;
+    }
+
+    @Override
+    public IRecordDataParser<char[]> createRecordParser(IHyracksTaskContext ctx)
+            throws HyracksDataException, AsterixException, IOException {
+        ClassAdParser parser = new ClassAdParser(recordType);
+        parser.configure(configuration, recordType);
+        return parser;
+    }
+
+    @Override
+    public Class<? extends char[]> getRecordClass() {
+        return char[].class;
+    }
+
+}
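
A hedged sketch of how this factory is wired up, using only the methods shown in this diff
(configure, setRecordType, createRecordParser); the configuration values, the record type, and the
IHyracksTaskContext are assumptions supplied by the surrounding runtime:

    Map<String, String> conf = new HashMap<>();
    conf.put(ClassAdParserFactory.KEY_OLD_FORMAT, "true");   // request the line-oriented old format
    ClassAdParserFactory factory = new ClassAdParserFactory();
    factory.configure(conf);
    factory.setRecordType(recordType);
    IRecordDataParser<char[]> parser = factory.createRecordParser(ctx);   // ctx: IHyracksTaskContext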

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
index 852b31d..e34a09b 100644
--- a/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/library/adapter/TestTypedAdapterFactory.java
@@ -21,12 +21,13 @@ package org.apache.asterix.external.library.adapter;
 import java.io.InputStream;
 import java.util.Map;
 
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.asterix.external.api.IAdapterFactory;
 import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.asterix.external.parser.ADMDataParser;
 import org.apache.asterix.external.util.DataflowUtils;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.external.util.FeedUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksCountPartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
@@ -59,6 +60,7 @@ public class TestTypedAdapterFactory implements IAdapterFactory {
 
     @Override
     public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+        final String nodeId = ctx.getJobletContext().getApplicationContext().getNodeId();
         ITupleParserFactory tupleParserFactory = new ITupleParserFactory() {
             private static final long serialVersionUID = 1L;
 
@@ -69,10 +71,13 @@ public class TestTypedAdapterFactory implements IAdapterFactory {
                 ArrayTupleBuilder tb;
                 try {
                     parser = new ADMDataParser();
-                    forwarder = DataflowUtils.getTupleForwarder(configuration);
+                    forwarder = DataflowUtils.getTupleForwarder(configuration,
+                            FeedUtils.getFeedLogManager(ctx, partition,
+                                    FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
+                                            ExternalDataUtils.getFeedName(configuration), nodeId, partition)));
                     forwarder.configure(configuration);
                     tb = new ArrayTupleBuilder(1);
-                } catch (AsterixException e) {
+                } catch (Exception e) {
                     throw new HyracksDataException(e);
                 }
                 return new ITupleParser() {


[25/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
new file mode 100644
index 0000000..c0245d7
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedOperations.java
@@ -0,0 +1,254 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.util.Collection;
+import java.util.List;
+
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.feed.api.IFeedJoint;
+import org.apache.asterix.external.feed.api.IFeedMessage;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.management.FeedId;
+import org.apache.asterix.external.feed.message.EndFeedMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
+import org.apache.asterix.external.feed.message.PrepareStallMessage;
+import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
+import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
+import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.operators.FeedMessageOperatorDescriptor;
+import org.apache.asterix.external.util.FeedConstants;
+import org.apache.asterix.file.JobSpecificationUtils;
+import org.apache.asterix.metadata.declared.AqlMetadataProvider;
+import org.apache.asterix.metadata.entities.Feed;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.algebricks.common.utils.Triple;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
+import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
+
+/**
+ * Provides helper method(s) for creating JobSpec for operations on a feed.
+ */
+public class FeedOperations {
+
+    /**
+     * Builds the job spec for ingesting a (primary) feed from its external source via the feed adaptor.
+     * @param primaryFeed the feed to be ingested
+     * @param metadataProvider the metadata provider used to build the intake runtime
+     * @param policyAccessor accessor for the feed ingestion policy
+     * @return JobSpecification the Hyracks job specification for receiving data from the external source
+     * @throws Exception
+     */
+    public static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed primaryFeed,
+            AqlMetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
+
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        spec.setFrameSize(FeedConstants.JobConstants.DEFAULT_FRAME_SIZE);
+        IAdapterFactory adapterFactory = null;
+        IOperatorDescriptor feedIngestor;
+        AlgebricksPartitionConstraint ingesterPc;
+
+        try {
+            Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> t = metadataProvider
+                    .buildFeedIntakeRuntime(spec, primaryFeed, policyAccessor);
+            feedIngestor = t.first;
+            ingesterPc = t.second;
+            adapterFactory = t.third;
+        } catch (AlgebricksException e) {
+            e.printStackTrace();
+            throw new AsterixException(e);
+        }
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedIngestor, ingesterPc);
+
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, ingesterPc);
+        spec.connect(new OneToOneConnectorDescriptor(spec), feedIngestor, 0, nullSink, 0);
+        spec.addRoot(nullSink);
+        return new Pair<JobSpecification, IAdapterFactory>(spec, adapterFactory);
+    }
+
+    public static JobSpecification buildDiscontinueFeedSourceSpec(AqlMetadataProvider metadataProvider, FeedId feedId)
+            throws AsterixException, AlgebricksException {
+
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IOperatorDescriptor feedMessenger = null;
+        AlgebricksPartitionConstraint messengerPc = null;
+
+        List<String> locations = FeedLifecycleListener.INSTANCE.getIntakeLocations(feedId);
+        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDiscontinueFeedMessengerRuntime(spec, feedId,
+                locations);
+
+        feedMessenger = p.first;
+        messengerPc = p.second;
+
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
+        spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
+        spec.addRoot(nullSink);
+
+        return spec;
+    }
+
+    /**
+     * Builds the job spec for sending a message to an active feed to disconnect it from
+     * its source.
+     */
+    public static Pair<JobSpecification, Boolean> buildDisconnectFeedJobSpec(AqlMetadataProvider metadataProvider,
+            FeedConnectionId connectionId) throws AsterixException, AlgebricksException {
+
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IOperatorDescriptor feedMessenger;
+        AlgebricksPartitionConstraint messengerPc;
+        List<String> locations = null;
+        FeedRuntimeType sourceRuntimeType;
+        try {
+            FeedConnectJobInfo cInfo = FeedLifecycleListener.INSTANCE.getFeedConnectJobInfo(connectionId);
+            IFeedJoint sourceFeedJoint = cInfo.getSourceFeedJoint();
+            IFeedJoint computeFeedJoint = cInfo.getComputeFeedJoint();
+
+            boolean terminateIntakeJob = false;
+            boolean completeDisconnect = computeFeedJoint == null || computeFeedJoint.getReceivers().isEmpty();
+            if (completeDisconnect) {
+                sourceRuntimeType = FeedRuntimeType.INTAKE;
+                locations = cInfo.getCollectLocations();
+                terminateIntakeJob = sourceFeedJoint.getReceivers().size() == 1;
+            } else {
+                locations = cInfo.getComputeLocations();
+                sourceRuntimeType = FeedRuntimeType.COMPUTE;
+            }
+
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDisconnectFeedMessengerRuntime(spec,
+                    connectionId, locations, sourceRuntimeType, completeDisconnect, sourceFeedJoint.getOwnerFeedId());
+
+            feedMessenger = p.first;
+            messengerPc = p.second;
+
+            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
+            NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
+            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
+            spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
+            spec.addRoot(nullSink);
+            return new Pair<JobSpecification, Boolean>(spec, terminateIntakeJob);
+
+        } catch (AlgebricksException e) {
+            throw new AsterixException(e);
+        }
+
+    }
+
+    public static JobSpecification buildPrepareStallMessageJob(PrepareStallMessage stallMessage,
+            Collection<String> collectLocations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, stallMessage.getConnectionId(), stallMessage, collectLocations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static JobSpecification buildNotifyThrottlingEnabledMessageJob(
+            ThrottlingEnabledFeedMessage throttlingEnabledMesg, Collection<String> locations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, throttlingEnabledMesg.getConnectionId(), throttlingEnabledMesg, locations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static JobSpecification buildTerminateFlowMessageJob(TerminateDataFlowMessage terminateMessage,
+            List<String> collectLocations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, terminateMessage.getConnectionId(), terminateMessage, collectLocations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static JobSpecification buildCommitAckResponseJob(FeedTupleCommitResponseMessage commitResponseMessage,
+            Collection<String> targetLocations) throws AsterixException {
+        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
+        try {
+            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
+                    messageJobSpec, commitResponseMessage.getConnectionId(), commitResponseMessage, targetLocations);
+            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
+        } catch (AlgebricksException ae) {
+            throw new AsterixException(ae);
+        }
+        return messageJobSpec;
+    }
+
+    public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDiscontinueFeedMessengerRuntime(
+            JobSpecification jobSpec, FeedId feedId, List<String> locations) throws AlgebricksException {
+        FeedConnectionId feedConnectionId = new FeedConnectionId(feedId, null);
+        IFeedMessage feedMessage = new EndFeedMessage(feedConnectionId, FeedRuntimeType.INTAKE,
+                feedConnectionId.getFeedId(), true, EndFeedMessage.EndMessageType.DISCONTINUE_SOURCE);
+        return buildSendFeedMessageRuntime(jobSpec, feedConnectionId, feedMessage, locations);
+    }
+
+    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildSendFeedMessageRuntime(
+            JobSpecification jobSpec, FeedConnectionId feedConnectionId, IFeedMessage feedMessage,
+            Collection<String> locations) throws AlgebricksException {
+        AlgebricksPartitionConstraint partitionConstraint = new AlgebricksAbsolutePartitionConstraint(
+                locations.toArray(new String[] {}));
+        FeedMessageOperatorDescriptor feedMessenger = new FeedMessageOperatorDescriptor(jobSpec, feedConnectionId,
+                feedMessage);
+        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, partitionConstraint);
+    }
+
+    private static JobSpecification buildSendFeedMessageJobSpec(IOperatorDescriptor operatorDescriptor,
+            AlgebricksPartitionConstraint messengerPc, JobSpecification messageJobSpec) {
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, operatorDescriptor,
+                messengerPc);
+        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(messageJobSpec);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, nullSink, messengerPc);
+        messageJobSpec.connect(new OneToOneConnectorDescriptor(messageJobSpec), operatorDescriptor, 0, nullSink, 0);
+        messageJobSpec.addRoot(nullSink);
+        return messageJobSpec;
+    }
+
+    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDisconnectFeedMessengerRuntime(
+            JobSpecification jobSpec, FeedConnectionId feedConnectionId, List<String> locations,
+            FeedRuntimeType sourceFeedRuntimeType, boolean completeDisconnection, FeedId sourceFeedId)
+                    throws AlgebricksException {
+        IFeedMessage feedMessage = new EndFeedMessage(feedConnectionId, sourceFeedRuntimeType, sourceFeedId,
+                completeDisconnection, EndFeedMessage.EndMessageType.DISCONNECT_FEED);
+        return buildSendFeedMessageRuntime(jobSpec, feedConnectionId, feedMessage, locations);
+    }
+}
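
Each of the message-job builders above follows the same pattern: build a FeedMessageOperatorDescriptor,
pin it to the target locations, and terminate the pipeline with a NullSinkOperatorDescriptor. A hedged
sketch of a call site, modeled on sendCommitResponseMessage in FeedTrackingManager later in this change
(the message object and the location collection are placeholders):

    JobSpecification spec = FeedOperations.buildPrepareStallMessageJob(stallMessage, collectLocations);
    CentralFeedManager.runJob(spec, false);   // same submission call used by FeedTrackingManager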

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedTrackingManager.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedTrackingManager.java
new file mode 100644
index 0000000..29230c1
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedTrackingManager.java
@@ -0,0 +1,187 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.util.Arrays;
+import java.util.BitSet;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.management.FeedConnectionId;
+import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
+import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedTrackingManager implements IFeedTrackingManager {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedTrackingManager.class.getName());
+
+    private final BitSet allOnes;
+
+    private Map<FeedConnectionId, Map<AckId, BitSet>> ackHistory;
+    private Map<FeedConnectionId, Map<AckId, Integer>> maxBaseAcked;
+
+    public FeedTrackingManager() {
+        byte[] allOneBytes = new byte[128];
+        Arrays.fill(allOneBytes, (byte) 0xff);
+        allOnes = BitSet.valueOf(allOneBytes);
+        ackHistory = new HashMap<FeedConnectionId, Map<AckId, BitSet>>();
+        maxBaseAcked = new HashMap<FeedConnectionId, Map<AckId, Integer>>();
+    }
+
+    @Override
+    public synchronized void submitAckReport(FeedTupleCommitAckMessage ackMessage) {
+        AckId ackId = getAckId(ackMessage);
+        Map<AckId, BitSet> acksForConnection = ackHistory.get(ackMessage.getConnectionId());
+        if (acksForConnection == null) {
+            acksForConnection = new HashMap<AckId, BitSet>();
+            acksForConnection.put(ackId, BitSet.valueOf(ackMessage.getCommitAcks()));
+            ackHistory.put(ackMessage.getConnectionId(), acksForConnection);
+        }
+        BitSet currentAcks = acksForConnection.get(ackId);
+        if (currentAcks == null) {
+            currentAcks = BitSet.valueOf(ackMessage.getCommitAcks());
+            acksForConnection.put(ackId, currentAcks);
+        } else {
+            currentAcks.or(BitSet.valueOf(ackMessage.getCommitAcks()));
+        }
+        if (Arrays.equals(currentAcks.toByteArray(), allOnes.toByteArray())) {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info(ackMessage.getIntakePartition() + " (" + ackMessage.getBase() + ")" + " is covered");
+            }
+            Map<AckId, Integer> maxBaseAckedForConnection = maxBaseAcked.get(ackMessage.getConnectionId());
+            if (maxBaseAckedForConnection == null) {
+                maxBaseAckedForConnection = new HashMap<AckId, Integer>();
+                maxBaseAcked.put(ackMessage.getConnectionId(), maxBaseAckedForConnection);
+            }
+            Integer maxBaseAckedValue = maxBaseAckedForConnection.get(ackId);
+            if (maxBaseAckedValue == null) {
+                maxBaseAckedValue = ackMessage.getBase();
+                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
+                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
+                        ackMessage.getBase());
+            } else if (ackMessage.getBase() == maxBaseAckedValue + 1) {
+                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
+                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
+                        ackMessage.getBase());
+            } else {
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info("Ignoring discontinuous acked base " + ackMessage.getBase() + " for " + ackId);
+                }
+            }
+
+        } else {
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("AckId " + ackId + " pending number of acks " + (128 * 8 - currentAcks.cardinality()));
+            }
+        }
+    }
+
+    public synchronized void disableTracking(FeedConnectionId connectionId) {
+        ackHistory.remove(connectionId);
+        maxBaseAcked.remove(connectionId);
+    }
+
+    private void sendCommitResponseMessage(FeedConnectionId connectionId, int partition, int base) {
+        FeedTupleCommitResponseMessage response = new FeedTupleCommitResponseMessage(connectionId, partition, base);
+        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
+        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
+        String collectLocation = collectLocations.get(partition);
+        Set<String> messageDestinations = new HashSet<String>();
+        messageDestinations.add(collectLocation);
+        messageDestinations.addAll(storageLocations);
+        try {
+            JobSpecification spec = FeedOperations.buildCommitAckResponseJob(response, messageDestinations);
+            CentralFeedManager.runJob(spec, false);
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.WARNING)) {
+                LOGGER.warning("Unable to send commit response message " + response + " exception " + e.getMessage());
+            }
+        }
+    }
+
+    private static AckId getAckId(FeedTupleCommitAckMessage ackMessage) {
+        return new AckId(ackMessage.getConnectionId(), ackMessage.getIntakePartition(), ackMessage.getBase());
+    }
+
+    private static class AckId {
+        private FeedConnectionId connectionId;
+        private int intakePartition;
+        private int base;
+
+        public AckId(FeedConnectionId connectionId, int intakePartition, int base) {
+            this.connectionId = connectionId;
+            this.intakePartition = intakePartition;
+            this.base = base;
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) {
+                return true;
+            }
+            if (!(o instanceof AckId)) {
+                return false;
+            }
+            AckId other = (AckId) o;
+            return other.getConnectionId().equals(connectionId) && other.getIntakePartition() == intakePartition
+                    && other.getBase() == base;
+        }
+
+        @Override
+        public String toString() {
+            return connectionId + "[" + intakePartition + "]" + "(" + base + ")";
+        }
+
+        @Override
+        public int hashCode() {
+            return toString().hashCode();
+        }
+
+        public FeedConnectionId getConnectionId() {
+            return connectionId;
+        }
+
+        public int getIntakePartition() {
+            return intakePartition;
+        }
+
+        public int getBase() {
+            return base;
+        }
+
+    }
+
+    @Override
+    public void disableAcking(FeedConnectionId connectionId) {
+        ackHistory.remove(connectionId);
+        maxBaseAcked.remove(connectionId);
+        if (LOGGER.isLoggable(Level.WARNING)) {
+            LOGGER.warning("Acking disabled for " + connectionId);
+        }
+    }
+
+}
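
The ack bookkeeping above amounts to OR-ing each partial FeedTupleCommitAckMessage payload into a
per-(connection, partition, base) BitSet and declaring the base covered once it equals a 1024-bit
all-ones mask. A small standalone sketch of that idea (the payload arrays are illustrative):

    // Mirrors the accumulation in submitAckReport; firstAckPayload/laterAckPayload are byte[] placeholders.
    byte[] allOneBytes = new byte[128];
    Arrays.fill(allOneBytes, (byte) 0xff);
    BitSet allOnes = BitSet.valueOf(allOneBytes);              // 128 * 8 = 1024 bits, all set
    BitSet acks = BitSet.valueOf(firstAckPayload);             // acks reported so far for this base
    acks.or(BitSet.valueOf(laterAckPayload));                  // merge a later partial ack
    boolean covered = Arrays.equals(acks.toByteArray(), allOnes.toByteArray());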

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkCollection.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkCollection.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkCollection.java
new file mode 100644
index 0000000..53b9792
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkCollection.java
@@ -0,0 +1,206 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.api.IFeedWork;
+import org.apache.asterix.external.feed.api.IFeedWorkEventListener;
+import org.apache.asterix.external.feed.management.FeedCollectInfo;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest;
+import org.apache.asterix.external.feed.management.FeedConnectionRequest.ConnectionStatus;
+import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.lang.common.statement.DataverseDecl;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.api.job.JobId;
+
+/**
+ * A collection of feed management related tasks, each represented as an implementation of {@code IFeedWork}.
+ */
+public class FeedWorkCollection {
+
+    private static Logger LOGGER = Logger.getLogger(FeedWorkCollection.class.getName());
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    /**
+     * The task of subscribing to a feed to obtain data.
+     */
+    public static class SubscribeFeedWork implements IFeedWork {
+
+        private final Runnable runnable;
+
+        private final FeedConnectionRequest request;
+
+        @Override
+        public Runnable getRunnable() {
+            return runnable;
+        }
+
+        public SubscribeFeedWork(String[] locations, FeedConnectionRequest request) {
+            this.runnable = new SubscribeFeedWorkRunnable(locations, request);
+            this.request = request;
+        }
+
+        private static class SubscribeFeedWorkRunnable implements Runnable {
+
+            private final FeedConnectionRequest request;
+            private final String[] locations;
+
+            public SubscribeFeedWorkRunnable(String[] locations, FeedConnectionRequest request) {
+                this.request = request;
+                this.locations = locations;
+            }
+
+            @Override
+            public void run() {
+                try {
+                    PrintWriter writer = new PrintWriter(System.out, true);
+                    SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+                    DataverseDecl dataverseDecl = new DataverseDecl(
+                            new Identifier(request.getReceivingFeedId().getDataverse()));
+                    SubscribeFeedStatement subscribeStmt = new SubscribeFeedStatement(locations, request);
+                    List<Statement> statements = new ArrayList<Statement>();
+                    statements.add(dataverseDecl);
+                    statements.add(subscribeStmt);
+                    QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+                    translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                            QueryTranslator.ResultDelivery.SYNC);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Submitted connection requests for execution: " + request);
+                    }
+                } catch (Exception e) {
+                    if (LOGGER.isLoggable(Level.SEVERE)) {
+                        LOGGER.severe("Exception in executing " + request);
+                    }
+                    throw new RuntimeException(e);
+                }
+            }
+        }
+
+        public static class FeedSubscribeWorkEventListener implements IFeedWorkEventListener {
+
+            @Override
+            public void workFailed(IFeedWork work, Exception e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request
+                            + " failed with exception " + e);
+                }
+            }
+
+            @Override
+            public void workCompleted(IFeedWork work) {
+                ((SubscribeFeedWork) work).request.setSubscriptionStatus(ConnectionStatus.ACTIVE);
+                if (LOGGER.isLoggable(Level.INFO)) {
+                    LOGGER.info(" Feed subscription request " + ((SubscribeFeedWork) work).request + " completed ");
+                }
+            }
+
+        }
+
+        public FeedConnectionRequest getRequest() {
+            return request;
+        }
+
+        @Override
+        public String toString() {
+            return "SubscribeFeedWork for [" + request + "]";
+        }
+
+    }
+
+    /**
+     * The task of activating a set of feeds.
+     */
+    public static class ActivateFeedWork implements IFeedWork {
+
+        private final Runnable runnable;
+
+        @Override
+        public Runnable getRunnable() {
+            return runnable;
+        }
+
+        public ActivateFeedWork(List<FeedCollectInfo> feedsToRevive) {
+            this.runnable = new FeedsActivateRunnable(feedsToRevive);
+        }
+
+        public ActivateFeedWork() {
+            this.runnable = new FeedsActivateRunnable();
+        }
+
+        private static class FeedsActivateRunnable implements Runnable {
+
+            private List<FeedCollectInfo> feedsToRevive;
+            private Mode mode;
+
+            public enum Mode {
+                REVIVAL_POST_NODE_REJOIN
+            }
+
+            public FeedsActivateRunnable(List<FeedCollectInfo> feedsToRevive) {
+                this.feedsToRevive = feedsToRevive;
+                // revival after a node rejoin is the only mode currently defined
+                this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
+            }
+
+            public FeedsActivateRunnable() {
+            }
+
+            @Override
+            public void run() {
+                switch (mode) {
+                    case REVIVAL_POST_NODE_REJOIN:
+                        try {
+                            Thread.sleep(10000);
+                        } catch (InterruptedException e1) {
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Attempt to resume feed interrupted");
+                            }
+                            throw new IllegalStateException(e1.getMessage());
+                        }
+                        for (FeedCollectInfo finfo : feedsToRevive) {
+                            try {
+                                JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
+                                if (LOGGER.isLoggable(Level.INFO)) {
+                                    LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
+                                    LOGGER.info("Job:" + finfo.jobSpec);
+                                }
+                            } catch (Exception e) {
+                                if (LOGGER.isLoggable(Level.WARNING)) {
+                                    LOGGER.warning(
+                                            "Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
+                                }
+                            }
+                        }
+                }
+            }
+
+        }
+
+    }
+}
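
Each IFeedWork above is essentially a named Runnable plus an optional IFeedWorkEventListener. A hedged
sketch of submitting the subscription work; the locations array, the connection request, and the use of
a plain thread are assumptions (the real system presumably routes this through its feed work manager):

    SubscribeFeedWork work = new SubscribeFeedWork(locations, request);
    new Thread(work.getRunnable()).start();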

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkRequestResponseHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkRequestResponseHandler.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkRequestResponseHandler.java
new file mode 100644
index 0000000..2dc1162
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedWorkRequestResponseHandler.java
@@ -0,0 +1,269 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.Set;
+import java.util.concurrent.LinkedBlockingQueue;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.common.api.IClusterManagementWork;
+import org.apache.asterix.common.api.IClusterManagementWorkResponse;
+import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
+import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
+import org.apache.asterix.external.feed.watch.FeedJobInfo;
+import org.apache.asterix.metadata.cluster.AddNodeWork;
+import org.apache.asterix.metadata.cluster.AddNodeWorkResponse;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.asterix.om.util.AsterixClusterProperties;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.constraints.Constraint;
+import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
+import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
+import org.apache.hyracks.api.constraints.expressions.ConstraintExpression;
+import org.apache.hyracks.api.constraints.expressions.ConstraintExpression.ExpressionTag;
+import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
+import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
+import org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression;
+import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
+import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class FeedWorkRequestResponseHandler implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedWorkRequestResponseHandler.class.getName());
+
+    private final LinkedBlockingQueue<IClusterManagementWorkResponse> inbox;
+
+    private Map<Integer, Map<String, List<FeedJobInfo>>> feedsWaitingForResponse = new HashMap<Integer, Map<String, List<FeedJobInfo>>>();
+
+    public FeedWorkRequestResponseHandler(LinkedBlockingQueue<IClusterManagementWorkResponse> inbox) {
+        this.inbox = inbox;
+    }
+
+    @Override
+    public void run() {
+        while (true) {
+            IClusterManagementWorkResponse response = null;
+            try {
+                response = inbox.take();
+            } catch (InterruptedException e) {
+                if (LOGGER.isLoggable(Level.WARNING)) {
+                    LOGGER.warning("Interrupted exception " + e.getMessage());
+                }
+                // go back to waiting; response is null here and must not be dereferenced
+                continue;
+            }
+            IClusterManagementWork submittedWork = response.getWork();
+            Map<String, String> nodeSubstitution = new HashMap<String, String>();
+            switch (submittedWork.getClusterManagementWorkType()) {
+                case ADD_NODE:
+                    AddNodeWork addNodeWork = (AddNodeWork) submittedWork;
+                    int workId = addNodeWork.getWorkId();
+                    Map<String, List<FeedJobInfo>> failureAnalysis = feedsWaitingForResponse.get(workId);
+                    AddNodeWorkResponse resp = (AddNodeWorkResponse) response;
+                    List<String> nodesAdded = resp.getNodesAdded();
+                    List<String> unsubstitutedNodes = new ArrayList<String>();
+                    unsubstitutedNodes.addAll(addNodeWork.getDeadNodes());
+                    int nodeIndex = 0;
+
+                    /** form a mapping between the failed node and its substitute **/
+                    if (nodesAdded != null && nodesAdded.size() > 0) {
+                        for (String failedNodeId : addNodeWork.getDeadNodes()) {
+                            String substitute = nodesAdded.get(nodeIndex);
+                            nodeSubstitution.put(failedNodeId, substitute);
+                            nodeIndex = (nodeIndex + 1) % nodesAdded.size();
+                            unsubstitutedNodes.remove(failedNodeId);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Node " + substitute + " chosen to substitute lost node " + failedNodeId);
+                            }
+                        }
+                    }
+                    if (unsubstitutedNodes.size() > 0) {
+                        String[] participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes()
+                                .toArray(new String[] {});
+                        nodeIndex = 0;
+                        for (String unsubstitutedNode : unsubstitutedNodes) {
+                            nodeSubstitution.put(unsubstitutedNode, participantNodes[nodeIndex]);
+                            if (LOGGER.isLoggable(Level.INFO)) {
+                                LOGGER.info("Node " + participantNodes[nodeIndex] + " chosen to substitute lost node "
+                                        + unsubstitutedNode);
+                            }
+                            nodeIndex = (nodeIndex + 1) % participantNodes.length;
+                        }
+
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Request " + resp.getWork() + " completed using internal nodes");
+                        }
+                    }
+
+                    // alter failed feed intake jobs
+
+                    for (Entry<String, List<FeedJobInfo>> entry : failureAnalysis.entrySet()) {
+                        String failedNode = entry.getKey();
+                        List<FeedJobInfo> impactedJobInfos = entry.getValue();
+                        for (FeedJobInfo info : impactedJobInfos) {
+                            JobSpecification spec = info.getSpec();
+                            replaceNode(spec, failedNode, nodeSubstitution.get(failedNode));
+                            info.setSpec(spec);
+                        }
+                    }
+
+                    Set<FeedIntakeInfo> revisedIntakeJobs = new HashSet<FeedIntakeInfo>();
+                    Set<FeedConnectJobInfo> revisedConnectJobInfos = new HashSet<FeedConnectJobInfo>();
+
+                    for (List<FeedJobInfo> infos : failureAnalysis.values()) {
+                        for (FeedJobInfo info : infos) {
+                            switch (info.getJobType()) {
+                                case INTAKE:
+                                    revisedIntakeJobs.add((FeedIntakeInfo) info);
+                                    break;
+                                case FEED_CONNECT:
+                                    revisedConnectJobInfos.add((FeedConnectJobInfo) info);
+                                    break;
+                            }
+                        }
+                    }
+
+                    IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+                    try {
+                        for (FeedIntakeInfo info : revisedIntakeJobs) {
+                            hcc.startJob(info.getSpec());
+                        }
+                        Thread.sleep(2000);
+                        for (FeedConnectJobInfo info : revisedConnectJobInfos) {
+                            hcc.startJob(info.getSpec());
+                            Thread.sleep(2000);
+                        }
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to start revised jobs after failure: " + e.getMessage());
+                        }
+                    }
+
+                    break;
+                case REMOVE_NODE:
+                    throw new IllegalStateException("Invalid work submitted");
+            }
+        }
+    }
+
+    private void replaceNode(JobSpecification jobSpec, String failedNodeId, String replacementNode) {
+        Set<Constraint> userConstraints = jobSpec.getUserConstraints();
+        List<Constraint> locationConstraintsToReplace = new ArrayList<Constraint>();
+        List<Constraint> countConstraintsToReplace = new ArrayList<Constraint>();
+        List<OperatorDescriptorId> modifiedOperators = new ArrayList<OperatorDescriptorId>();
+        Map<OperatorDescriptorId, List<Constraint>> candidateConstraints = new HashMap<OperatorDescriptorId, List<Constraint>>();
+        Map<OperatorDescriptorId, Map<Integer, String>> newConstraints = new HashMap<OperatorDescriptorId, Map<Integer, String>>();
+        OperatorDescriptorId opId = null;
+        for (Constraint constraint : userConstraints) {
+            LValueConstraintExpression lexpr = constraint.getLValue();
+            ConstraintExpression cexpr = constraint.getRValue();
+            switch (lexpr.getTag()) {
+                case PARTITION_COUNT:
+                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
+                    if (modifiedOperators.contains(opId)) {
+                        countConstraintsToReplace.add(constraint);
+                    } else {
+                        List<Constraint> clist = candidateConstraints.get(opId);
+                        if (clist == null) {
+                            clist = new ArrayList<Constraint>();
+                            candidateConstraints.put(opId, clist);
+                        }
+                        clist.add(constraint);
+                    }
+                    break;
+                case PARTITION_LOCATION:
+                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
+                    String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+                    if (oldLocation.equals(failedNodeId)) {
+                        locationConstraintsToReplace.add(constraint);
+                        modifiedOperators.add(((PartitionLocationExpression) lexpr).getOperatorDescriptorId());
+                        Map<Integer, String> newLocs = newConstraints.get(opId);
+                        if (newLocs == null) {
+                            newLocs = new HashMap<Integer, String>();
+                            newConstraints.put(opId, newLocs);
+                        }
+                        int partition = ((PartitionLocationExpression) lexpr).getPartition();
+                        newLocs.put(partition, replacementNode);
+                    } else {
+                        if (modifiedOperators.contains(opId)) {
+                            locationConstraintsToReplace.add(constraint);
+                            Map<Integer, String> newLocs = newConstraints.get(opId);
+                            if (newLocs == null) {
+                                newLocs = new HashMap<Integer, String>();
+                                newConstraints.put(opId, newLocs);
+                            }
+                            int partition = ((PartitionLocationExpression) lexpr).getPartition();
+                            newLocs.put(partition, oldLocation);
+                        } else {
+                            List<Constraint> clist = candidateConstraints.get(opId);
+                            if (clist == null) {
+                                clist = new ArrayList<Constraint>();
+                                candidateConstraints.put(opId, clist);
+                            }
+                            clist.add(constraint);
+                        }
+                    }
+                    break;
+                default:
+                    break;
+            }
+        }
+
+        jobSpec.getUserConstraints().removeAll(locationConstraintsToReplace);
+        jobSpec.getUserConstraints().removeAll(countConstraintsToReplace);
+
+        for (OperatorDescriptorId mopId : modifiedOperators) {
+            List<Constraint> clist = candidateConstraints.get(mopId);
+            if (clist != null && !clist.isEmpty()) {
+                jobSpec.getUserConstraints().removeAll(clist);
+
+                for (Constraint c : clist) {
+                    if (c.getLValue().getTag().equals(ExpressionTag.PARTITION_LOCATION)) {
+                        ConstraintExpression cexpr = c.getRValue();
+                        int partition = ((PartitionLocationExpression) c.getLValue()).getPartition();
+                        String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
+                        newConstraints.get(mopId).put(partition, oldLocation);
+                    }
+                }
+            }
+        }
+
+        for (Entry<OperatorDescriptorId, Map<Integer, String>> entry : newConstraints.entrySet()) {
+            OperatorDescriptorId nopId = entry.getKey();
+            Map<Integer, String> clist = entry.getValue();
+            IOperatorDescriptor op = jobSpec.getOperatorMap().get(nopId);
+            String[] locations = new String[clist.size()];
+            for (int i = 0; i < locations.length; i++) {
+                locations[i] = clist.get(i);
+            }
+            PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, op, locations);
+        }
+
+    }
+
+    public void registerFeedWork(int workId, Map<String, List<FeedJobInfo>> impactedJobs) {
+        feedsWaitingForResponse.put(workId, impactedJobs);
+    }
+}
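
For context, here is a minimal wiring sketch for the handler above. Only the constructor and registerFeedWork() from the diff are taken as given; the thread name, the workId value, and the map contents are illustrative assumptions.

    // hedged wiring sketch; names not appearing in this patch are assumptions
    LinkedBlockingQueue<IClusterManagementWorkResponse> inbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
    FeedWorkRequestResponseHandler handler = new FeedWorkRequestResponseHandler(inbox);
    Thread t = new Thread(handler, "feed-work-response-handler"); // runs the blocking take() loop
    t.setDaemon(true);
    t.start();
    // before AddNodeWork with a given workId is submitted, record which feed jobs each dead node impacts
    int workId = 1; // assumed to match AddNodeWork.getWorkId()
    Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<String, List<FeedJobInfo>>();
    handler.registerFeedWork(workId, impactedJobs);
    // the cluster manager later pushes its AddNodeWorkResponse into the same inbox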

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/FeedsActivator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/FeedsActivator.java b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedsActivator.java
new file mode 100644
index 0000000..5a6d28e
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/FeedsActivator.java
@@ -0,0 +1,118 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.io.PrintWriter;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.management.FeedCollectInfo;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
+import org.apache.asterix.lang.common.statement.DataverseDecl;
+import org.apache.asterix.lang.common.struct.Identifier;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.api.job.JobId;
+
+public class FeedsActivator implements Runnable {
+
+    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    private List<FeedCollectInfo> feedsToRevive;
+    private Mode mode;
+
+    public enum Mode {
+        REVIVAL_POST_CLUSTER_REBOOT,
+        REVIVAL_POST_NODE_REJOIN
+    }
+
+    public FeedsActivator() {
+        this.mode = Mode.REVIVAL_POST_CLUSTER_REBOOT;
+    }
+
+    public FeedsActivator(List<FeedCollectInfo> feedsToRevive) {
+        this.feedsToRevive = feedsToRevive;
+        this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
+    }
+
+    @Override
+    public void run() {
+        switch (mode) {
+            case REVIVAL_POST_CLUSTER_REBOOT:
+                //revivePostClusterReboot();
+                break;
+            case REVIVAL_POST_NODE_REJOIN:
+                try {
+                    Thread.sleep(10000);
+                } catch (InterruptedException e1) {
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Attempt to resume feed interrupted");
+                    }
+                    throw new IllegalStateException(e1.getMessage());
+                }
+                for (FeedCollectInfo finfo : feedsToRevive) {
+                    try {
+                        JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
+                        if (LOGGER.isLoggable(Level.INFO)) {
+                            LOGGER.info("Resumed feed: " + finfo.feedConnectionId + ", job id: " + jobId);
+                            LOGGER.info("Job:" + finfo.jobSpec);
+                        }
+                    } catch (Exception e) {
+                        if (LOGGER.isLoggable(Level.WARNING)) {
+                            LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
+                        }
+                    }
+                }
+        }
+    }
+
+    public void reviveFeed(String dataverse, String feedName, String dataset, String feedPolicy) {
+        PrintWriter writer = new PrintWriter(System.out, true);
+        SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
+        try {
+            DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(dataverse));
+            ConnectFeedStatement stmt = new ConnectFeedStatement(new Identifier(dataverse), new Identifier(feedName),
+                    new Identifier(dataset), feedPolicy, 0);
+            stmt.setForceConnect(true);
+            List<Statement> statements = new ArrayList<Statement>();
+            statements.add(dataverseDecl);
+            statements.add(stmt);
+            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                    QueryTranslator.ResultDelivery.SYNC);
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Resumed feed: " + dataverse + ":" + dataset + " using policy " + feedPolicy);
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Exception in resuming loser feed: " + dataverse + ":" + dataset + " using policy "
+                        + feedPolicy + " Exception " + e.getMessage());
+            }
+        }
+    }
+}
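
A minimal usage sketch for the activator above; the assumption is that the feed lifecycle listener populates the list before handing it over.

    // illustrative only; feedsToRevive is assumed to be filled by the lifecycle listener
    List<FeedCollectInfo> feedsToRevive = new ArrayList<FeedCollectInfo>();
    FeedsActivator activator = new FeedsActivator(feedsToRevive);
    new Thread(activator, "feeds-activator").start(); // waits ~10s, then restarts each collect job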

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
index 46ea72b..9f024e9 100644
--- a/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
+++ b/asterix-app/src/main/java/org/apache/asterix/aql/translator/QueryTranslator.java
@@ -42,6 +42,11 @@ import java.util.logging.Logger;
 import org.apache.asterix.api.common.APIFramework;
 import org.apache.asterix.api.common.SessionConfig;
 import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.app.external.CentralFeedManager;
+import org.apache.asterix.app.external.ExternalIndexingOperations;
+import org.apache.asterix.app.external.FeedJoint;
+import org.apache.asterix.app.external.FeedLifecycleListener;
+import org.apache.asterix.app.external.FeedOperations;
 import org.apache.asterix.common.config.AsterixExternalProperties;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
@@ -69,13 +74,8 @@ import org.apache.asterix.external.feed.management.FeedLifecycleEventSubscriber;
 import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
 import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
 import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.feed.CentralFeedManager;
-import org.apache.asterix.feed.FeedJoint;
-import org.apache.asterix.feed.FeedLifecycleListener;
 import org.apache.asterix.file.DatasetOperations;
 import org.apache.asterix.file.DataverseOperations;
-import org.apache.asterix.file.ExternalIndexingOperations;
-import org.apache.asterix.file.FeedOperations;
 import org.apache.asterix.file.IndexOperations;
 import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
 import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java b/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java
deleted file mode 100644
index 4020bde..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/CentralFeedManager.java
+++ /dev/null
@@ -1,110 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.io.IOException;
-import java.io.PrintWriter;
-import java.io.StringReader;
-import java.util.List;
-
-import org.apache.asterix.api.common.SessionConfig;
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.external.feed.api.ICentralFeedManager;
-import org.apache.asterix.external.feed.api.IFeedLoadManager;
-import org.apache.asterix.external.feed.api.IFeedTrackingManager;
-import org.apache.asterix.external.feed.message.SocketMessageListener;
-import org.apache.asterix.lang.aql.parser.AQLParserFactory;
-import org.apache.asterix.lang.common.base.IParser;
-import org.apache.asterix.lang.common.base.IParserFactory;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class CentralFeedManager implements ICentralFeedManager {
-
-    private static final ICentralFeedManager centralFeedManager = new CentralFeedManager();
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    public static ICentralFeedManager getInstance() {
-        return centralFeedManager;
-    }
-
-    private final int port;
-    private final IFeedLoadManager feedLoadManager;
-    private final IFeedTrackingManager feedTrackingManager;
-    private final SocketMessageListener messageListener;
-
-    private CentralFeedManager() {
-        this.port = AsterixAppContextInfo.getInstance().getFeedProperties().getFeedCentralManagerPort();
-        this.feedLoadManager = new FeedLoadManager();
-        this.feedTrackingManager = new FeedTrackingManager();
-        this.messageListener = new SocketMessageListener(port, new FeedMessageReceiver(this));
-    }
-
-    @Override
-    public void start() throws AsterixException {
-        messageListener.start();
-    }
-
-    @Override
-    public void stop() throws AsterixException, IOException {
-        messageListener.stop();
-    }
-
-    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobId jobId = hcc.startJob(spec);
-        if (waitForCompletion) {
-            hcc.waitForCompletion(jobId);
-        }
-        return jobId;
-    }
-
-    @Override
-    public IFeedLoadManager getFeedLoadManager() {
-        return feedLoadManager;
-    }
-
-    @Override
-    public IFeedTrackingManager getFeedTrackingManager() {
-        return feedTrackingManager;
-    }
-
-    public static class AQLExecutor {
-
-        private static final PrintWriter out = new PrintWriter(System.out, true);
-        private static final IParserFactory parserFactory = new AQLParserFactory();
-
-        public static void executeAQL(String aql) throws Exception {
-            IParser parser = parserFactory.createParser(new StringReader(aql));
-            List<Statement> statements = parser.parse();
-            SessionConfig pc = new SessionConfig(out, OutputFormat.ADM);
-            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                    QueryTranslator.ResultDelivery.SYNC);
-        }
-    }
-
-}
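
The class is removed here because it relocates to org.apache.asterix.app.external, as the new imports in the QueryTranslator hunk above show. A hedged sketch of driving the relocated AQLExecutor helper, assuming its static API is unchanged; the AQL statement is illustrative only:

    // assumes the relocated class keeps the AQLExecutor shown in the deleted file
    org.apache.asterix.app.external.CentralFeedManager.AQLExecutor
            .executeAQL("use dataverse feeds; connect feed TwitterFeed to dataset Tweets;");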


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdUnParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdUnParser.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdUnParser.java
new file mode 100644
index 0000000..4689612
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdUnParser.java
@@ -0,0 +1,492 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.asterix.external.classad.Value.NumberFactor;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ClassAdUnParser {
+
+    // table of string representation of operators
+    public static final String[] opString = { "", " < ", " <= ", " != ", " == ", " >= ", " > ", " is ", " isnt ", " +",
+            " -", " + ", " - ", " * ", " / ", " % ", " !", " || ", " && ", " ~", " | ", " ^ ", " & ", " << ", " >> ",
+            " >>> ", " () ", " [] ", " ?: " };
+    protected static char delimiter = '\"';
+
+    /// Constructor
+    public ClassAdUnParser() {
+    }
+
+    // The default delimiter for strings is '\"'
+    // It can be changed to '\'' with this method to unparse quoted attributes
+    public void setDelimiter(char delim) {
+        delimiter = delim;
+    }
+
+    /**
+     * Unparse a value
+     *
+     * @param buffer
+     *            The string to unparse to
+     * @param val
+     *            The value to unparse
+     * @throws HyracksDataException
+     */
+    public void unparse(AMutableCharArrayString buffer, Value val) throws HyracksDataException {
+        switch (val.getType()) {
+            case NULL_VALUE:
+                buffer.appendString("(null-value)");
+                break;
+
+            case STRING_VALUE: {
+                AMutableCharArrayString s = new AMutableCharArrayString();
+                val.isStringValue(s);
+                buffer.appendChar('"');
+                for (int i = 0; i < s.getLength(); i++) {
+                    char ch = s.charAt(i);
+                    if (ch == delimiter) {
+                        if (delimiter == '\"') {
+                            buffer.appendString("\\\"");
+                            continue;
+                        } else {
+                            buffer.appendString("\\\'");
+                            continue;
+                        }
+                    }
+                    switch (ch) {
+                        case '\b':
+                            buffer.appendString("\\b");
+                            continue;
+                        case '\f':
+                            buffer.appendString("\\f");
+                            continue;
+                        case '\n':
+                            buffer.appendString("\\n");
+                            continue;
+                        case '\r':
+                            buffer.appendString("\\r");
+                            continue;
+                        case '\t':
+                            buffer.appendString("\\t");
+                            continue;
+                        case '\\':
+                            buffer.appendString("\\\\");
+                            continue;
+                        case '\'':
+                            buffer.appendString("\'");
+                            continue;
+                        case '\"':
+                            buffer.appendString("\"");
+                            continue;
+                        default:
+                            if (Character.isISOControl(ch)) {
+                                // print octal representation
+                                buffer.appendString(String.format("\\%03o", (int) ch)); // cast: %o needs an integral argument
+                                continue;
+                            }
+                            break;
+                    }
+
+                    buffer.appendChar(ch);
+                }
+                buffer.appendChar('"');
+                return;
+            }
+            case INTEGER_VALUE: {
+                AMutableInt64 i = new AMutableInt64(0);
+                val.isIntegerValue(i);
+                buffer.appendString(String.valueOf(i.getLongValue()));
+                return;
+            }
+            case REAL_VALUE: {
+                AMutableDouble real = new AMutableDouble(0);
+                val.isRealValue(real);
+                if (real.getDoubleValue() == 0.0) {
+                    // It might be positive or negative and it's
+                    // hard to tell. printf is good at telling though.
+                    // We also want to print it with as few
+                    // digits as possible, which is why we don't use the
+                    // case below.
+                    buffer.appendString(String.valueOf(real.getDoubleValue()));
+                } else if (Util.isNan(real.getDoubleValue())) {
+                    buffer.appendString("real(\"NaN\")");
+                } else if (Util.isInf(real.getDoubleValue()) == -1) {
+                    buffer.appendString("real(\"-INF\")");
+                } else if (Util.isInf(real.getDoubleValue()) == 1) {
+                    buffer.appendString("real(\"INF\")");
+                } else {
+                    buffer.appendString(String.format("%1.15E", real.getDoubleValue()));
+                }
+                return;
+            }
+            case BOOLEAN_VALUE: {
+                MutableBoolean b = new MutableBoolean();
+                val.isBooleanValue(b);
+                buffer.appendString(b.booleanValue() ? "true" : "false");
+                return;
+            }
+            case UNDEFINED_VALUE: {
+                buffer.appendString("undefined");
+                return;
+            }
+            case ERROR_VALUE: {
+                buffer.appendString("error");
+                return;
+            }
+            case ABSOLUTE_TIME_VALUE: {
+                ClassAdTime asecs = new ClassAdTime();
+                val.isAbsoluteTimeValue(asecs);
+
+                buffer.appendString("absTime(\"");
+                Util.absTimeToString(asecs, buffer);
+                buffer.appendString("\")");
+                return;
+            }
+            case RELATIVE_TIME_VALUE: {
+                ClassAdTime rsecs = new ClassAdTime();
+                val.isRelativeTimeValue(rsecs);
+                buffer.appendString("relTime(\"");
+                Util.relTimeToString(rsecs.getRelativeTime(), buffer);
+                buffer.appendString("\")");
+
+                return;
+            }
+            case CLASSAD_VALUE: {
+                ClassAd ad = new ClassAd();
+                Map<CaseInsensitiveString, ExprTree> attrs = new HashMap<CaseInsensitiveString, ExprTree>();
+                val.isClassAdValue(ad);
+                ad.getComponents(attrs);
+                unparseAux(buffer, attrs);
+                return;
+            }
+            case SLIST_VALUE:
+            case LIST_VALUE: {
+                ExprList el = new ExprList();
+                val.isListValue(el);
+                unparseAux(buffer, el);
+                return;
+            }
+        }
+    }
+
+    /**
+     * Unparse an expression
+     *
+     * @param buffer
+     *            The string to unparse to
+     * @param tree
+     *            The expression to unparse
+     * @throws HyracksDataException
+     */
+    public void unparse(AMutableCharArrayString buffer, ExprTree tree) throws HyracksDataException {
+        if (tree == null) {
+            buffer.appendString("<error:null expr>");
+            return;
+        }
+
+        switch (tree.getKind()) {
+            case LITERAL_NODE: { // value
+                Value val = new Value();
+                AMutableNumberFactor factor = new AMutableNumberFactor();
+                ((Literal) tree.self()).getComponents(val, factor);
+                unparseAux(buffer, val, factor.getFactor());
+                return;
+            }
+
+            case ATTRREF_NODE: { // string
+                ExprTreeHolder expr = new ExprTreeHolder(); //needs initialization
+                AMutableCharArrayString ref = new AMutableCharArrayString();
+                MutableBoolean absolute = new MutableBoolean();
+                ((AttributeReference) tree.self()).getComponents(expr, ref, absolute);
+                unparseAux(buffer, expr, ref, absolute.booleanValue());
+                return;
+            }
+
+            case OP_NODE: { //string
+                AMutableInt32 op = new AMutableInt32(0);
+                ExprTreeHolder t1 = new ExprTreeHolder();
+                ExprTreeHolder t2 = new ExprTreeHolder();
+                ExprTreeHolder t3 = new ExprTreeHolder();
+                ((Operation) tree.self()).getComponents(op, t1, t2, t3);
+                unparseAux(buffer, op.getIntegerValue().intValue(), t1, t2, t3);
+                return;
+            }
+
+            case FN_CALL_NODE: { // string
+                AMutableCharArrayString fnName = new AMutableCharArrayString();
+                ExprList args = new ExprList();
+                ((FunctionCall) tree.self()).getComponents(fnName, args);
+                unparseAux(buffer, fnName, args);
+                return;
+            }
+
+            case CLASSAD_NODE: { // nested record
+                Map<CaseInsensitiveString, ExprTree> attrs = new HashMap<CaseInsensitiveString, ExprTree>();
+                ((ClassAd) tree.self()).getComponents(attrs);
+                unparseAux(buffer, attrs);
+                return;
+            }
+            case EXPR_LIST_NODE: { // list
+                ExprList exprs = new ExprList();
+                ((ExprList) tree.self()).getComponents(exprs);
+                unparseAux(buffer, exprs);
+                return;
+            }
+
+            default:
+                // I really wonder whether we should except here, but I
+                // don't want to do that without further consultation.
+                // wenger 2003-12-11.
+                buffer.setValue("");
+                throw new HyracksDataException("unknown expression type");
+        }
+    }
+
+    private void unparseAux(AMutableCharArrayString buffer, AMutableCharArrayString fnName, ExprList args)
+            throws HyracksDataException {
+        buffer.appendString(fnName);
+        buffer.appendChar('(');
+        for (ExprTree tree : args.getExprList()) {
+            unparse(buffer, tree);
+            buffer.appendChar(',');
+        }
+        if (args.size() > 0) {
+            buffer.decrementLength();
+        }
+        buffer.appendChar(')');
+
+    }
+
+    public void unparseAux(AMutableCharArrayString buffer, final Value value, NumberFactor numFactor)
+            throws HyracksDataException {
+        unparse(buffer, value);
+        if ((value.isIntegerValue() || value.isRealValue()) && numFactor != NumberFactor.NO_FACTOR) {
+            buffer.appendChar((numFactor == NumberFactor.B_FACTOR) ? 'B'
+                    : (numFactor == NumberFactor.K_FACTOR) ? 'K'
+                            : (numFactor == NumberFactor.M_FACTOR) ? 'M'
+                                    : (numFactor == NumberFactor.G_FACTOR) ? 'G'
+                                            : (numFactor == NumberFactor.T_FACTOR) ? 'T' : '?');
+            if (buffer.charAt(buffer.getLength() - 1) == '?') {
+                buffer.reset();
+                throw new HyracksDataException("bad number factor");
+            }
+        }
+        return;
+    }
+
+    /**
+     * @param buffer
+     * @param tree
+     * @param ref
+     * @param absolute
+     *            = false if omitted
+     * @throws HyracksDataException
+     */
+    public void unparseAux(AMutableCharArrayString buffer, final ExprTree tree, AMutableCharArrayString ref,
+            boolean absolute) throws HyracksDataException {
+
+        if (tree != null && tree.self() != null) {
+            unparse(buffer, tree);
+            buffer.appendChar('.');
+            buffer.appendString(ref);
+            return;
+        }
+        if (absolute) {
+            buffer.appendChar('.');
+        }
+        unparseAux(buffer, ref);
+    };
+
+    public void unparseAux(AMutableCharArrayString buffer, final ExprTree tree, AMutableCharArrayString ref)
+            throws HyracksDataException {
+        unparseAux(buffer, tree, ref, false);
+    };
+
+    public void unparseAuxPairs(AMutableCharArrayString buffer, List<Entry<AMutableCharArrayString, ExprTree>> attrlist)
+            throws HyracksDataException {
+        String delim = "; "; // NAC
+        buffer.appendString("[ ");
+        for (Entry<AMutableCharArrayString, ExprTree> entry : attrlist) {
+            unparseAux(buffer, entry.getKey());
+            buffer.appendString(" = ");
+            unparse(buffer, entry.getValue());
+            buffer.appendString(delim);
+        }
+        //get rid of last delimiter
+        buffer.setLength(buffer.getLength() - delim.length());
+        buffer.appendString(" ]");
+    }
+
+    // to unparse attribute names (quoted & unquoted attributes)
+    public void unparseAux(AMutableCharArrayString buffer, AMutableCharArrayString identifier)
+            throws HyracksDataException {
+        Value val = new Value();
+        AMutableCharArrayString idstr = new AMutableCharArrayString();
+
+        val.setStringValue(identifier);
+        setDelimiter('\''); // change the delimiter from string-literal mode to quoted attribute mode
+        unparse(idstr, val);
+        setDelimiter('\"'); // set delimiter back to default setting
+        idstr.erase(0, 1);
+        idstr.erase(idstr.length() - 1, 1);
+        if (identifierNeedsQuoting(idstr)) {
+            idstr.insert(0, "'");
+            idstr.appendString("'");
+        }
+        buffer.appendString(idstr);
+    }
+
+    static boolean identifierNeedsQuoting(AMutableCharArrayString aString) {
+        return false;
+    }
+
+    public void unparseAux(AMutableCharArrayString buffer, Value val, AMutableNumberFactor factor)
+            throws HyracksDataException {
+        unparse(buffer, val);
+        if ((val.isIntegerValue() || val.isRealValue()) && factor.getFactor() != NumberFactor.NO_FACTOR) {
+            buffer.appendString((factor.getFactor() == NumberFactor.B_FACTOR) ? "B"
+                    : (factor.getFactor() == NumberFactor.K_FACTOR) ? "K"
+                            : (factor.getFactor() == NumberFactor.M_FACTOR) ? "M"
+                                    : (factor.getFactor() == NumberFactor.G_FACTOR) ? "G"
+                                            : (factor.getFactor() == NumberFactor.T_FACTOR) ? "T"
+                                                    : "<error:bad factor>");
+        }
+        return;
+    }
+
+    public void unparseAux(AMutableCharArrayString buffer, ExprTree expr, String attrName, boolean absolute)
+            throws HyracksDataException {
+        if (expr != null) {
+            unparse(buffer, expr);
+            buffer.appendString("." + attrName);
+            return;
+        }
+        if (absolute)
+            buffer.appendChar('.');
+        unparseAux(buffer, attrName);
+    }
+
+    public void unparseAux(AMutableCharArrayString buffer, int op, ExprTreeHolder t1, ExprTreeHolder t2,
+            ExprTreeHolder t3) throws HyracksDataException {
+        // case 0: parentheses op
+        if (op == Operation.OpKind_PARENTHESES_OP) {
+            buffer.appendString("( ");
+            unparse(buffer, t1);
+            buffer.appendString(" )");
+            return;
+        }
+        // case 1: check for unary ops
+        if (op == Operation.OpKind_UNARY_PLUS_OP || op == Operation.OpKind_UNARY_MINUS_OP
+                || op == Operation.OpKind_LOGICAL_NOT_OP || op == Operation.OpKind_BITWISE_NOT_OP) {
+            buffer.appendString(opString[op]);
+            unparse(buffer, t1);
+            return;
+        }
+        // case 2: check for ternary op
+        if (op == Operation.OpKind_TERNARY_OP) {
+            unparse(buffer, t1);
+            buffer.appendString(" ? ");
+            unparse(buffer, t2);
+            buffer.appendString(" : ");
+            unparse(buffer, t3);
+            return;
+        }
+        // case 3: check for subscript op
+        if (op == Operation.OpKind_SUBSCRIPT_OP) {
+            unparse(buffer, t1);
+            buffer.appendChar('[');
+            unparse(buffer, t2);
+            buffer.appendChar(']');
+            return;
+        }
+
+        // all others are binary ops
+        unparse(buffer, t1);
+        buffer.appendString(opString[op]);
+        unparse(buffer, t2);
+    }
+
+    public void UnparseAux(AMutableCharArrayString buffer, String fnName, ExprList args) throws HyracksDataException {
+        buffer.appendString(fnName + "(");
+        for (ExprTree tree : args.getExprList()) {
+            unparse(buffer, tree);
+            buffer.appendChar(',');
+        }
+        buffer.setChar(buffer.getLength() - 1, ')');
+    }
+
+    public void unparseAux(AMutableCharArrayString buffer, Map<CaseInsensitiveString, ExprTree> attrs)
+            throws HyracksDataException {
+
+        String delim = "; "; // NAC
+
+        buffer.appendString("[ ");
+
+        for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
+            unparseAux(buffer, entry.getKey().get());
+            buffer.appendString(" = ");
+            unparse(buffer, entry.getValue());
+            buffer.appendString(delim); // NAC
+        }
+        buffer.setLength(buffer.getLength() - delim.length());
+        buffer.appendString(" ]");
+
+    }
+
+    public void unparseAux(AMutableCharArrayString buffer, ExprList exprs) throws HyracksDataException {
+
+        buffer.appendString("{ ");
+        for (ExprTree expr : exprs.getExprList()) {
+            unparse(buffer, expr);
+            buffer.appendChar(',');
+        }
+        buffer.decrementLength();
+        buffer.appendString(" }");
+    }
+
+    /* To unparse the identifier strings
+     * based on the character content,
+     * it's unparsed either as a quoted attribute or non-quoted attribute
+     */
+    public void unparseAux(AMutableCharArrayString buffer, String identifier) throws HyracksDataException {
+        Value val = new Value();
+        AMutableCharArrayString idstr = new AMutableCharArrayString();
+
+        val.setStringValue(identifier);
+        setDelimiter('\''); // change the delimiter from string-literal mode to quoted attribute mode
+        unparse(idstr, val);
+        setDelimiter('\"'); // set delimiter back to default setting
+        idstr.erase(0, 1);
+        idstr.erase(idstr.length() - 1, 1);
+        if (identifierNeedsQuoting(idstr)) {
+            idstr.prependChar('\'');
+            idstr.appendChar('\'');
+        }
+        buffer.appendString(idstr);
+    }
+}
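
A short usage sketch for the unparser above. Value.setStringValue(String) is taken from its use later in this file; calling toString() on AMutableCharArrayString is an assumption.

    // illustrative only
    ClassAdUnParser unparser = new ClassAdUnParser();
    Value v = new Value();
    v.setStringValue("hello\nworld");      // the embedded newline is escaped as \n
    AMutableCharArrayString out = new AMutableCharArrayString();
    unparser.unparse(out, v);              // yields "hello\nworld", including the surrounding quotes
    System.out.println(out.toString());    // toString() assumed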

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Common.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Common.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Common.java
new file mode 100644
index 0000000..b3c027b
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Common.java
@@ -0,0 +1,66 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.nio.charset.StandardCharsets;
+
+public class Common {
+    public static final String ATTR_AD = "Ad";
+    public static final String ATTR_CONTEXT = "Context";
+    public static final String ATTR_DEEP_MODS = "DeepMods";
+    public static final String ATTR_DELETE_AD = "DeleteAd";
+    public static final String ATTR_DELETES = "Deletes";
+    public static final String ATTR_KEY = "Key";
+    public static final String ATTR_NEW_AD = "NewAd";
+    public static final String ATTR_OP_TYPE = "OpType";
+    public static final String ATTR_PARENT_VIEW_NAME = "ParentViewName";
+    public static final String ATTR_PARTITION_EXPRS = "PartitionExprs";
+    public static final String ATTR_PARTITIONED_VIEWS = "PartitionedViews";
+    public static final String ATTR_PROJECT_THROUGH = "ProjectThrough";
+    public static final String ATTR_RANK_HINTS = "RankHints";
+    public static final String ATTR_REPLACE = "Replace";
+    public static final String ATTR_SUBORDINATE_VIEWS = "SubordinateViews";
+    public static final String ATTR_UPDATES = "Updates";
+    public static final String ATTR_WANT_LIST = "WantList";
+    public static final String ATTR_WANT_PRELUDE = "WantPrelude";
+    public static final String ATTR_WANT_RESULTS = "WantResults";
+    public static final String ATTR_WANT_POSTLUDE = "WantPostlude";
+    public static final String ATTR_VIEW_INFO = "ViewInfo";
+    public static final String ATTR_VIEW_NAME = "ViewName";
+    public static final String ATTR_XACTION_NAME = "XactionName";
+    public static final String ATTR_REQUIREMENTS = "Requirements";
+    public static final String ATTR_RANK = "Rank";
+
+    public static class CaseIgnLTStr {
+        public static boolean call(String s1, String s2) {
+            return (s1.compareToIgnoreCase(s2) < 0);
+        }
+    };
+
+    public static class ClassadAttrNameHash {
+        public static int call(String s) {
+            int h = 0;
+            byte[] bytes = s.getBytes(StandardCharsets.UTF_8);
+            for (byte ch : bytes) {
+                h = 5 * h + (ch | 0x20);
+            }
+            return h;
+        }
+    };
+}
\ No newline at end of file
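
A quick illustration of ClassadAttrNameHash above: OR-ing each byte with 0x20 lower-cases ASCII letters, so attribute names that differ only in case hash identically.

    // illustrative check of the case-folding property
    int h1 = Common.ClassadAttrNameHash.call("Rank");
    int h2 = Common.ClassadAttrNameHash.call("rank");
    assert h1 == h2; // both fold to the same byte sequence before hashing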

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/EvalState.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/EvalState.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/EvalState.java
new file mode 100644
index 0000000..0719fd8
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/EvalState.java
@@ -0,0 +1,120 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+public class EvalState {
+
+    private int depthRemaining; // max recursion depth - current depth
+    // Normally, rootAd will be the ClassAd at the root of the tree
+    // of ExprTrees in the current evaluation. That is, the parent
+    // scope whose parent scope is NULL.
+    // It can be set to a closer parent scope. Then that ClassAd is
+    // treated like it has no parent scope for LookupInScope() and
+    // Evaluate().
+    private ClassAd rootAd;
+    private ClassAd curAd;
+    private boolean flattenAndInline; // NAC
+    private boolean inAttrRefScope;
+
+    public boolean isInAttrRefScope() {
+        return inAttrRefScope;
+    }
+
+    public void setFlattenAndInline(boolean flattenAndInline) {
+        this.flattenAndInline = flattenAndInline;
+    }
+
+    public void setInAttrRefScope(boolean inAttrRefScope) {
+        this.inAttrRefScope = inAttrRefScope;
+    }
+
+    public EvalState() {
+        rootAd = new ClassAd();
+        curAd = new ClassAd();
+        depthRemaining = ExprTree.MAX_CLASSAD_RECURSION;
+        flattenAndInline = false; // NAC
+        inAttrRefScope = false;
+    }
+
+    public boolean isFlattenAndInline() {
+        return flattenAndInline;
+    }
+
+    public void setScopes(ClassAd curScope) {
+        curAd = curScope;
+        setRootScope();
+    }
+
+    public void setRootScope() {
+        ClassAd prevScope = curAd;
+        if (curAd == null) {
+            rootAd = null;
+        } else {
+            ClassAd curScope = curAd.getParentScope();
+
+            while (curScope != null) {
+                if (curScope == curAd) { // NAC - loop detection
+                    rootAd = null;
+                    return; // NAC
+                } // NAC
+                prevScope = curScope;
+                curScope = curScope.getParentScope();
+            }
+
+            rootAd = prevScope;
+        }
+        return;
+    }
+
+    public void reset() {
+        rootAd.reset();
+        curAd.reset();
+        depthRemaining = ExprTree.MAX_CLASSAD_RECURSION;
+        flattenAndInline = false;
+        inAttrRefScope = false;
+    }
+
+    public ClassAd getRootAd() {
+        return rootAd;
+    }
+
+    public ClassAd getCurAd() {
+        return curAd;
+    }
+
+    public void setCurAd(ClassAd curAd) {
+        this.curAd = curAd;
+    }
+
+    public int getDepthRemaining() {
+        return depthRemaining;
+    }
+
+    public void decrementDepth() {
+        depthRemaining--;
+    }
+
+    public void incrementDepth() {
+        depthRemaining++;
+    }
+
+    public void setRootAd(ClassAd classAd) {
+        this.rootAd = classAd;
+    }
+}
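
The depth counter above is the recursion guard used throughout evaluation; the pattern, mirroring ExprList.getValue further down, is check, decrement, evaluate, increment.

    // hedged sketch of the guard pattern; the actual evaluation call is elided
    EvalState es = new EvalState();
    if (es.getDepthRemaining() <= 0) {
        // callers set an error value and bail out here
    } else {
        es.decrementDepth();
        // ... evaluate a sub-expression against es.getCurAd() ...
        es.incrementDepth();
    }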

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprList.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprList.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprList.java
new file mode 100644
index 0000000..13ef3c1
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprList.java
@@ -0,0 +1,280 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ExprList extends ExprTree {
+
+    private List<ExprTree> exprList;
+    private EvalState state = new EvalState();
+    public boolean isShared = false;
+
+    public boolean copyFrom(ExprList exprList) throws HyracksDataException {
+        this.exprList.clear();
+        for (ExprTree expr : exprList.exprList) {
+            this.exprList.add(expr.copy());
+        }
+        return true;
+    }
+
+    public int getlast() {
+        return exprList == null ? 0 : exprList.size() - 1;
+    }
+
+    public ExprTree get(int i) {
+        return exprList.get(i);
+    }
+
+    @Override
+    public int size() {
+        return exprList == null ? 0 : exprList.size();
+    }
+
+    // called from FunctionCall
+    @Override
+    public void privateSetParentScope(ClassAd scope) {
+        for (ExprTree tree : exprList) {
+            tree.setParentScope(scope);
+        }
+    }
+
+    @Override
+    public NodeKind getKind() {
+        return NodeKind.EXPR_LIST_NODE;
+    }
+
+    public List<ExprTree> getExprList() {
+        return exprList;
+    }
+
+    public Iterator<ExprTree> iterator() {
+        return exprList.iterator();
+    }
+
+    public void setValue(ExprList value) throws HyracksDataException {
+        if (value == null) {
+            clear();
+        } else {
+            copyFrom(value);
+        }
+    }
+
+    public void add(ExprTree expr) {
+        exprList.add(expr.self());
+    }
+
+    public void setExprList(List<ExprTree> exprList) {
+        this.exprList = exprList;
+    }
+
+    public ExprList(List<ExprTree> exprs) {
+        exprList = new ArrayList<ExprTree>();
+        copyList(exprs);
+        return;
+    }
+
+    public ExprList(ExprList other_list) throws HyracksDataException {
+        exprList = new ArrayList<ExprTree>();
+        copyFrom(other_list);
+        return;
+    }
+
+    public ExprList() {
+        exprList = new ArrayList<ExprTree>();
+    }
+
+    public ExprList(boolean b) {
+        this.exprList = new ArrayList<ExprTree>();
+        this.isShared = b;
+    }
+
+    public void clear() {
+        exprList.clear();
+    }
+
+    @Override
+    public ExprTree copy() throws HyracksDataException {
+        ExprList newList = new ExprList();
+        newList.copyFrom(this);
+        return newList;
+    }
+
+    @Override
+    public boolean sameAs(ExprTree tree) {
+        boolean is_same;
+        if (this == tree) {
+            is_same = true;
+        } else if (tree.getKind() != NodeKind.EXPR_LIST_NODE) {
+            is_same = false;
+        } else {
+            ExprList other_list = (ExprList) tree;
+            if (exprList.size() != other_list.size()) {
+                is_same = false;
+            } else {
+                is_same = true;
+                for (int i = 0; i < exprList.size(); i++) {
+                    if (!exprList.get(i).sameAs(other_list.get(i))) {
+                        is_same = false;
+                        break;
+                    }
+                }
+            }
+        }
+        return is_same;
+    }
+
+    public static ExprList createExprList(List<ExprTree> exprs) {
+        ExprList el = new ExprList();
+        el.copyList(exprs);
+        return el;
+    }
+
+    public static ExprList createExprList(ExprList exprs) {
+        ExprList el = new ExprList();
+        el.copyList(exprs.exprList);
+        return el;
+    }
+
+    public void getComponents(List<ExprTree> exprs) {
+        exprs.clear();
+        exprs.addAll(exprList);
+    }
+
+    public void getComponents(ExprList list) throws HyracksDataException {
+        list.clear();
+        list.addAll(exprList);
+        /*
+        for(ExprTree e: exprList){
+            list.add(e.Copy());
+        }*/
+    }
+
+    private void addAll(List<ExprTree> exprList) {
+        this.exprList.addAll(exprList);
+    }
+
+    public void insert(ExprTree t) {
+        exprList.add(t);
+    }
+
+    public void push_back(ExprTree t) {
+        exprList.add(t);
+    }
+
+    public void erase(int f, int to) {
+        int listInitialSize = exprList.size();
+        Iterator<ExprTree> it = exprList.iterator();
+        int i = 0;
+        while (i < listInitialSize && i < to) {
+            it.next();
+            if (i >= f) {
+                it.remove();
+            }
+            i++;
+        }
+        return;
+    }
+
+    public void erase(int index) {
+        exprList.remove(index);
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val) throws HyracksDataException {
+        val.setListValue(this);
+        return (true);
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val, ExprTreeHolder sig) throws HyracksDataException {
+        val.setListValue(this);
+        sig.setInnerTree(copy());
+        return (sig.getInnerTree() != null);
+    }
+
+    @Override
+    public boolean privateFlatten(EvalState state, Value val, ExprTreeHolder tree, AMutableInt32 aInt)
+            throws HyracksDataException {
+        ExprList newList = new ExprList();
+
+        tree.setInnerTree(null); // Just to be safe...  wenger 2003-12-11.
+
+        for (ExprTree expr : exprList) {
+            // use a fresh holder and value per element so the entries of the new
+            // list do not all alias the last flattened expression
+            ExprTreeHolder nexpr = new ExprTreeHolder();
+            Value tempVal = new Value();
+            // flatten the constituent expression
+            if (!expr.publicFlatten(state, tempVal, nexpr)) {
+                return false;
+            }
+            // if only a value was obtained, convert to an expression
+            if (nexpr.getInnerTree() == null) {
+                nexpr.setInnerTree(Literal.createLiteral(tempVal));
+                if (nexpr.getInnerTree() == null) {
+                    return false;
+                }
+            }
+            // add the new expression to the flattened list
+            newList.push_back(nexpr);
+        }
+        tree.setInnerTree(newList);
+        return true;
+    }
+
+    public void copyList(List<ExprTree> exprs) {
+        for (ExprTree expr : exprs) {
+            exprList.add(expr);
+        }
+    }
+
+    public boolean getValue(Value val, ExprTree tree, EvalState es) throws HyracksDataException {
+        if (tree == null) {
+            return false;
+        }
+
+        // if called from user code, es is null, so we use the member state instead
+        EvalState currentState = (es != null) ? es : state;
+
+        if (currentState.getDepthRemaining() <= 0) {
+            val.setErrorValue();
+            return false;
+        }
+        currentState.decrementDepth();
+
+        ClassAd tmpScope = currentState.getCurAd();
+        currentState.setCurAd(tree.getParentScope());
+        tree.publicEvaluate(currentState, val);
+        currentState.setCurAd(tmpScope);
+
+        currentState.incrementDepth();
+
+        return true;
+    }
+
+    @Override
+    public void reset() {
+        exprList.clear();
+    }
+}
\ No newline at end of file
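
For reference, a minimal usage sketch of the list API above (not part of the patch; "someValue" stands in for any previously constructed Value): an ExprList evaluates to itself as a list value via privateEvaluate, so building and evaluating one looks like this.

    ExprList list = new ExprList();
    list.add(Literal.createLiteral(someValue)); // wrap an existing Value as a Literal node
    Value result = new Value();
    EvalState state = new EvalState();
    boolean ok = list.publicEvaluate(state, result); // on success, result holds the list value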

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTree.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTree.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTree.java
new file mode 100644
index 0000000..ccbfd8b
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTree.java
@@ -0,0 +1,401 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * A node of the expression tree, which may be a literal, attribute reference,
+ * function call, classad, expression list, or an operator applied to other
+ * ExprTree operands.
+ */
+public abstract class ExprTree {
+
+    /// The kinds of nodes in expression trees
+    public enum NodeKind {
+        /// Literal node (string, integer, real, boolean, undefined, error)
+        LITERAL_NODE,
+        /// Attribute reference node (attr, .attr, expr.attr)
+        ATTRREF_NODE,
+        /// Expression operation node (unary, binary, ternary)
+        OP_NODE,
+        /// Function call node
+        FN_CALL_NODE,
+        /// ClassAd node
+        CLASSAD_NODE,
+        /// Expression list node
+        EXPR_LIST_NODE,
+        /// Expression envelope.
+        EXPR_ENVELOPE
+    }
+
+    public enum EvalResult {
+        EVAL_FAIL,
+        EVAL_OK,
+        EVAL_UNDEF,
+        EVAL_ERROR
+    };
+
+    public static final int EVAL_FAIL_Int = 0;
+    public static final int EVAL_OK_Int = 1;
+    public static final int EVAL_UNDEF_Int = 2;
+    public static final int EVAL_ERROR_Int = 3;
+
+    public static final int MAX_CLASSAD_RECURSION = 1000;
+
+    public boolean isTreeHolder() {
+        return false;
+    }
+
+    public int size;
+    public ClassAd parentScope;
+
+    private CallableDebugFunction userDebugFunction;
+
+    public abstract void reset();
+
+    public ExprTree() {
+        this.parentScope = null;
+        this.size = 0;
+    }
+
+    public ExprTree(ExprTree expr) {
+        this.size = expr.size;
+    }
+
+    public void resetExprTree(ExprTree expr) {
+        if (expr == null) {
+            this.size = 0;
+        } else {
+            this.size = expr.size;
+        }
+    }
+
+    public static class ExprHash {
+        public static int call(ExprTree x) {
+            return x.size;
+        }
+    }
+
+    public ExprTree getTree() {
+        return this;
+    }
+
+    /**
+     * Sets the lexical parent scope of the expression, which is used to
+     * determine the lexical scoping structure for resolving attribute
+     * references. (However, the semantic parent may be different from
+     * the lexical parent if a <tt>super</tt> attribute is specified.)
+     * This method is automatically called when expressions are
+     * inserted into ClassAds, and should thus be called explicitly
+     * only when evaluating expressions which haven't been inserted
+     * into a ClassAd.
+     */
+    public void setParentScope(ClassAd scope) {
+        if (scope == null) {
+            parentScope = null;
+            return;
+        }
+        if (parentScope == null) {
+            parentScope = new ClassAd();
+        }
+        parentScope.setValue(scope);
+        privateSetParentScope(scope);
+    }
+
+    abstract protected void privateSetParentScope(ClassAd scope);
+
+    /**
+     * Gets the parent scope of the expression.
+     *
+     * @return The parent scope of the expression.
+     */
+    public ClassAd getParentScope() {
+        return parentScope;
+    }
+
+    /**
+     * Makes a deep copy of the expression tree
+     *
+     * @return A deep copy of the expression, or NULL on failure.
+     * @throws HyracksDataException
+     */
+
+    public abstract ExprTree copy() throws HyracksDataException;
+
+    /**
+     * Gets the node kind of this expression node.
+     *
+     * @return The node kind. Child nodes MUST implement this.
+     * @see NodeKind
+     */
+    public abstract NodeKind getKind();
+
+    /**
+     * To eliminate the mass of external checks to see if the ExprTree is
+     * a classad.
+     */
+    public static boolean isClassAd(Object o) {
+        return (o instanceof ClassAd);
+    }
+
+    /**
+     * Return a reference to the raw ExprTree beneath the interface.
+     */
+
+    public ExprTree self() {
+        return this;
+    }
+
+    /// A debugging method; send expression to stdout
+    public void puke() throws HyracksDataException {
+        PrettyPrint unp = new PrettyPrint();
+        AMutableCharArrayString buffer = new AMutableCharArrayString();
+        unp.unparse(buffer, this);
+        System.out.println(buffer.toString());
+    }
+
+    // Pass in a callback that takes a String message and prints it somewhere
+    // useful whenever the classad debug() function is called.
+    public void setUserDebugFunction(CallableDebugFunction dbf) {
+        this.userDebugFunction = dbf;
+    }
+
+    public void debugPrint(String message) {
+        if (userDebugFunction != null) {
+            userDebugFunction.call(message);
+        }
+    }
+
+    public void debugFormatValue(Value value) throws HyracksDataException {
+        debugFormatValue(value, 0.0);
+    }
+
+    public void debugFormatValue(Value value, double time) throws HyracksDataException {
+        MutableBoolean boolValue = new MutableBoolean(false);
+        AMutableInt64 intValue = new AMutableInt64(0);
+        AMutableDouble doubleValue = new AMutableDouble(0.0);
+        AMutableCharArrayString stringValue = new AMutableCharArrayString();
+
+        if (NodeKind.CLASSAD_NODE == getKind())
+            return;
+
+        PrettyPrint unp = new PrettyPrint();
+        AMutableCharArrayString buffer = new AMutableCharArrayString();
+        unp.unparse(buffer, this);
+
+        String result = "Classad debug: ";
+        if (time != 0) {
+            String buf = String.format("%5.5fms", time * 1000);
+            result += "[";
+            result += buf;
+            result += "] ";
+        }
+        result += buffer;
+        result += " --> ";
+
+        switch (value.getType()) {
+            case NULL_VALUE:
+                result += "NULL\n";
+                break;
+            case ERROR_VALUE:
+                if ((NodeKind.FN_CALL_NODE == getKind()) && !((FunctionCall) (this)).functionIsDefined()) {
+                    result += "ERROR (function is not defined)\n";
+                } else {
+                    result += "ERROR\n";
+                }
+                break;
+            case UNDEFINED_VALUE:
+                result += "UNDEFINED\n";
+                break;
+            case BOOLEAN_VALUE:
+                if (value.isBooleanValue(boolValue))
+                    result += boolValue.booleanValue() ? "TRUE\n" : "FALSE\n";
+                break;
+            case INTEGER_VALUE:
+                if (value.isIntegerValue(intValue)) {
+                    result += String.format("%d", intValue.getLongValue());
+                    result += "\n";
+                }
+                break;
+
+            case REAL_VALUE:
+                if (value.isRealValue(doubleValue)) {
+                    result += String.format("%f", doubleValue.getDoubleValue());
+                    result += "\n";
+                }
+                break;
+            case RELATIVE_TIME_VALUE:
+                result += "RELATIVE TIME\n";
+                break;
+            case ABSOLUTE_TIME_VALUE:
+                result += "ABSOLUTE TIME\n";
+                break;
+            case STRING_VALUE:
+                if (value.isStringValue(stringValue)) {
+                    result += stringValue.toString();
+                    result += "\n";
+                }
+                break;
+            case CLASSAD_VALUE:
+                result += "CLASSAD\n";
+                break;
+            case LIST_VALUE:
+                result += "LIST\n";
+                break;
+            case SLIST_VALUE:
+                result += "SLIST\n";
+                break;
+        }
+        debugPrint(result);
+    }
+
+    /**
+     * Evaluate this tree
+     *
+     * @param state
+     *            The current state
+     * @param val
+     *            The result of the evaluation
+     * @return true on success, false on failure
+     * @throws HyracksDataException
+     */
+
+    public boolean publicEvaluate(EvalState state, Value val) throws HyracksDataException {
+        return privateEvaluate(state, val);
+    }
+
+    public boolean publicEvaluate(EvalState state, Value val, ExprTreeHolder sig) throws HyracksDataException {
+        return privateEvaluate(state, val, sig);
+    }
+
+    public abstract boolean privateEvaluate(EvalState state, Value val) throws HyracksDataException;
+
+    public abstract boolean privateEvaluate(EvalState state, Value val, ExprTreeHolder tree)
+            throws HyracksDataException;
+
+    /**
+     * Evaluate this tree.
+     * This only works if the expression is currently part of a ClassAd.
+     *
+     * @param val
+     *            The result of the evaluation
+     * @return true on success, false on failure
+     * @throws HyracksDataException
+     */
+    public boolean publicEvaluate(Value val) throws HyracksDataException {
+        EvalState state = new EvalState();
+        if (parentScope == null) {
+            val.setErrorValue();
+            return false;
+        } else {
+            state.setScopes(parentScope);
+            return (publicEvaluate(state, val));
+        }
+    }
+
+    /**
+     * Is this ExprTree the same as the tree?
+     *
+     * @return true if it is the same, false otherwise
+     */
+    public abstract boolean sameAs(ExprTree tree);
+
+    /**
+     * Fill in this ExprTree with the contents of the other ExprTree.
+     *
+     * @return true if the copy succeeded, false otherwise.
+     * @throws HyracksDataException
+     */
+    public void copyFrom(ExprTree tree) throws HyracksDataException {
+        if (!this.equals(tree)) {
+            parentScope = tree.parentScope;
+        }
+        return;
+    }
+
+    public interface CallableDebugFunction {
+        public void call(String message);
+    }
+
+    public boolean publicEvaluate(Value val, ExprTreeHolder sig) throws HyracksDataException {
+        EvalState state = new EvalState();
+        state.setScopes(parentScope);
+        return (publicEvaluate(state, val, sig));
+    }
+
+    public boolean publicFlatten(Value val, ExprTreeHolder tree) throws HyracksDataException {
+        EvalState state = new EvalState();
+        state.setScopes(parentScope);
+        return (publicFlatten(state, val, tree));
+    }
+
+    public boolean publicFlatten(EvalState state, Value val, ExprTreeHolder tree, AMutableInt32 op)
+            throws HyracksDataException {
+        return (privateFlatten(state, val, tree, op));
+    }
+
+    public boolean publicFlatten(EvalState state, Value val, ExprTreeHolder tree) throws HyracksDataException {
+        return (privateFlatten(state, val, tree, null));
+    }
+
+    public abstract boolean privateFlatten(EvalState state, Value val, ExprTreeHolder tree, AMutableInt32 op)
+            throws HyracksDataException;
+
+    public boolean isClassad(ClassAd ptr) {
+        return (ptr instanceof ClassAd);
+    }
+
+    public int exprHash(ExprTree expr, int numBkts) {
+        int result = expr.getKind().ordinal() + 1000;
+        result += numBkts * (3 / 2);
+        return (result % numBkts);
+    }
+
+    @Override
+    public String toString() {
+        ClassAdUnParser unparser = new PrettyPrint();
+        AMutableCharArrayString string_representation = new AMutableCharArrayString();
+
+        try {
+            unparser.unparse(string_representation, this);
+        } catch (HyracksDataException e) {
+            e.printStackTrace();
+        }
+        return string_representation.toString();
+
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o instanceof ExprTree) {
+            return sameAs((ExprTree) o);
+        }
+        return false;
+    }
+
+    public int size() {
+        return size;
+    }
+}
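
A short illustration of the evaluation entry points above (a sketch, not part of the patch): an expression that has not been inserted into a ClassAd needs setParentScope() before publicEvaluate(Value) will succeed. The function name used here is one of the builtins registered in FunctionCall below.

    ClassAd scope = new ClassAd();
    ExprTree expr = FunctionCall.createFunctionCall("currenttime", new ExprList());
    expr.setParentScope(scope);
    Value result = new Value();
    boolean ok = expr.publicEvaluate(result); // true on success; result holds the evaluated value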

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTreeHolder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTreeHolder.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTreeHolder.java
new file mode 100644
index 0000000..89c5c0b
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ExprTreeHolder.java
@@ -0,0 +1,144 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ExprTreeHolder extends ExprTree {
+    private ExprTree innerTree;
+
+    @Override
+    public ClassAd getParentScope() {
+        return innerTree.parentScope;
+    }
+
+    @Override
+    public void copyFrom(ExprTree tree) throws HyracksDataException {
+        if (tree == null) {
+            innerTree = null;
+        } else {
+            if (tree.isTreeHolder()) {
+                tree = ((ExprTreeHolder) tree).innerTree;
+            }
+            if (innerTree == null) {
+                innerTree = tree.copy();
+            } else {
+                innerTree.copyFrom(tree);
+            }
+        }
+    }
+
+    @Override
+    public void reset() {
+        this.innerTree = null;
+    }
+
+    @Override
+    public void puke() throws HyracksDataException {
+        PrettyPrint unp = new PrettyPrint();
+        AMutableCharArrayString buffer = new AMutableCharArrayString();
+        unp.unparse(buffer, innerTree);
+        System.out.println(buffer.toString());
+    }
+
+    @Override
+    public void resetExprTree(ExprTree expr) {
+        setInnerTree(expr);
+    }
+
+    @Override
+    public ExprTree getTree() {
+        return innerTree;
+    }
+
+    @Override
+    public ExprTree self() {
+        return innerTree;
+    }
+
+    @Override
+    public boolean isTreeHolder() {
+        return true;
+    }
+
+    public ExprTreeHolder() {
+        innerTree = null;
+    }
+
+    public ExprTreeHolder(ExprTree tree) {
+        setInnerTree(tree);
+    }
+
+    public ExprTree getInnerTree() {
+        return innerTree;
+    }
+
+    public void setInnerTree(ExprTree innerTree) {
+        if (innerTree != null && innerTree.isTreeHolder()) {
+            setInnerTree(((ExprTreeHolder) innerTree).getInnerTree());
+        } else {
+            this.innerTree = innerTree;
+        }
+    }
+
+    @Override
+    public ExprTree copy() throws HyracksDataException {
+        return innerTree.copy();
+    }
+
+    @Override
+    public NodeKind getKind() {
+        return innerTree.getKind();
+    }
+
+    @Override
+    public boolean sameAs(ExprTree tree) {
+        if (tree == null) {
+            return innerTree == null;
+        }
+        return innerTree == null ? false : innerTree.sameAs(tree);
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val) throws HyracksDataException {
+        return innerTree.privateEvaluate(state, val);
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val, ExprTreeHolder tree) throws HyracksDataException {
+        return innerTree.privateEvaluate(state, val, tree);
+    }
+
+    @Override
+    public boolean privateFlatten(EvalState state, Value val, ExprTreeHolder tree, AMutableInt32 op)
+            throws HyracksDataException {
+        return innerTree.privateFlatten(state, val, tree, op);
+    }
+
+    @Override
+    public int size() {
+        return innerTree != null ? 1 : 0;
+    }
+
+    @Override
+    protected void privateSetParentScope(ClassAd scope) {
+        innerTree.privateSetParentScope(scope);
+    }
+}
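
Note on the holder semantics above (sketch, not part of the patch): setInnerTree() unwraps nested holders, so a holder always points at a raw node rather than at another holder.

    ExprTreeHolder inner = new ExprTreeHolder(new FunctionCall());
    ExprTreeHolder outer = new ExprTreeHolder();
    outer.setInnerTree(inner);                            // the holder is unwrapped on assignment
    assert outer.getInnerTree() == inner.getInnerTree();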

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FileLexerSource.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FileLexerSource.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FileLexerSource.java
new file mode 100644
index 0000000..cfa8932
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FileLexerSource.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.io.BufferedReader;
+import java.io.File;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+
+public class FileLexerSource extends LexerSource {
+    // This source allows input from a file
+    private BufferedReader reader;
+    private boolean unread;
+    private boolean finished;
+    private boolean nextRead;
+    private char nextChar;
+
+    public FileLexerSource(File file) throws IOException {
+        this.reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8);
+    }
+
+    public void setNewSource(File file) throws IOException {
+        if (this.reader != null) {
+            reader.close();
+        }
+        this.reader = Files.newBufferedReader(file.toPath(), StandardCharsets.UTF_8);
+    }
+
+    @Override
+    public char readCharacter() throws IOException {
+        if (unread) {
+            unread = false;
+            return previousCharacter;
+        } else if (nextRead) {
+            nextRead = false;
+            return nextChar;
+        }
+        previousCharacter = (char) reader.read();
+        return previousCharacter;
+    }
+
+    @Override
+    public void unreadCharacter() throws IOException {
+        if (nextRead) {
+            throw new IOException("Unexpected Situation");
+        } else if (unread) {
+            throw new IOException("This lexer source supports only one step back");
+        }
+        unread = true;
+    }
+
+    @Override
+    public boolean atEnd() throws IOException {
+        if (finished) {
+            return true;
+        } else if (nextRead) {
+            return false;
+        }
+        // read() returns -1 at end of stream; check before casting to char,
+        // otherwise the EOF marker is lost and atEnd() never becomes true
+        int next = reader.read();
+        if (next < 0) {
+            finished = true;
+        } else {
+            nextChar = (char) next;
+            nextRead = true;
+        }
+        return finished;
+    }
+
+    @Override
+    public char[] getBuffer() {
+        return null;
+    }
+}
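
Typical use of the file source above (sketch, not part of the patch; the file name is hypothetical, and the calls can throw IOException):

    FileLexerSource source = new FileLexerSource(new File("job.classads"));
    StringBuilder text = new StringBuilder();
    while (!source.atEnd()) {
        text.append(source.readCharacter()); // one character at a time, with one-step unread support
    }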

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FunctionCall.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FunctionCall.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FunctionCall.java
new file mode 100644
index 0000000..bbc0e7a
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/FunctionCall.java
@@ -0,0 +1,354 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.util.HashMap;
+
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FunctionCall extends ExprTree {
+
+    public static boolean initialized = false;
+
+    public static final ClassAdFunc[] ClassAdBuiltinFunc = { BuiltinClassAdFunctions.IsType,
+            BuiltinClassAdFunctions.TestMember, BuiltinClassAdFunctions.Size, BuiltinClassAdFunctions.SumAvg,
+            BuiltinClassAdFunctions.MinMax, BuiltinClassAdFunctions.ListCompare, BuiltinClassAdFunctions.debug,
+            BuiltinClassAdFunctions.formatTime, BuiltinClassAdFunctions.getField, BuiltinClassAdFunctions.currentTime,
+            BuiltinClassAdFunctions.timeZoneOffset, BuiltinClassAdFunctions.splitTime, BuiltinClassAdFunctions.dayTime,
+            BuiltinClassAdFunctions.epochTime, BuiltinClassAdFunctions.strCat, BuiltinClassAdFunctions.changeCase,
+            BuiltinClassAdFunctions.subString, BuiltinClassAdFunctions.convInt, BuiltinClassAdFunctions.compareString,
+            BuiltinClassAdFunctions.matchPattern, BuiltinClassAdFunctions.matchPatternMember,
+            BuiltinClassAdFunctions.substPattern, BuiltinClassAdFunctions.convReal, BuiltinClassAdFunctions.convString,
+            BuiltinClassAdFunctions.unparse, BuiltinClassAdFunctions.convBool, BuiltinClassAdFunctions.convTime,
+            BuiltinClassAdFunctions.doRound, BuiltinClassAdFunctions.doMath2, BuiltinClassAdFunctions.random,
+            BuiltinClassAdFunctions.ifThenElse, BuiltinClassAdFunctions.stringListsIntersect,
+            BuiltinClassAdFunctions.interval, BuiltinClassAdFunctions.eval };
+
+    // function call specific information
+    private String functionName;
+    private ClassAdFunc function;
+    private ExprList arguments;
+    public static final HashMap<String, ClassAdFunc> funcTable = new HashMap<String, ClassAdFunc>();
+
+    static {
+        // load up the function dispatch table
+        // type predicates
+        funcTable.put("isundefined", BuiltinClassAdFunctions.IsType);
+        funcTable.put("iserror", BuiltinClassAdFunctions.IsType);
+        funcTable.put("isstring", BuiltinClassAdFunctions.IsType);
+        funcTable.put("isinteger", BuiltinClassAdFunctions.IsType);
+        funcTable.put("isreal", BuiltinClassAdFunctions.IsType);
+        funcTable.put("islist", BuiltinClassAdFunctions.IsType);
+        funcTable.put("isclassad", BuiltinClassAdFunctions.IsType);
+        funcTable.put("isboolean", BuiltinClassAdFunctions.IsType);
+        funcTable.put("isabstime", BuiltinClassAdFunctions.IsType);
+        funcTable.put("isreltime", BuiltinClassAdFunctions.IsType);
+        // list membership
+        funcTable.put("member", BuiltinClassAdFunctions.TestMember);
+        funcTable.put("identicalmember", BuiltinClassAdFunctions.TestMember);
+        // Some list functions, useful for lists as sets
+        funcTable.put("size", BuiltinClassAdFunctions.Size);
+        funcTable.put("sum", BuiltinClassAdFunctions.SumAvg);
+        funcTable.put("avg", BuiltinClassAdFunctions.SumAvg);
+        funcTable.put("min", BuiltinClassAdFunctions.MinMax);
+        funcTable.put("max", BuiltinClassAdFunctions.MinMax);
+        funcTable.put("anycompare", BuiltinClassAdFunctions.ListCompare);
+        funcTable.put("allcompare", BuiltinClassAdFunctions.ListCompare);
+        //basic functions
+        /*
+        funcTable.put("sumfrom", BuiltinFunctions.SumAvgFrom);
+        funcTable.put("avgfrom", BuiltinFunctions.SumAvgFrom);
+        funcTable.put("maxfrom", BuiltinFunctions.BoundFrom);
+        funcTable.put("minfrom", BuiltinFunctions.BoundFrom);
+        */
+        // time management
+        funcTable.put("time", BuiltinClassAdFunctions.epochTime);
+        funcTable.put("currenttime", BuiltinClassAdFunctions.currentTime);
+        funcTable.put("timezoneoffset", BuiltinClassAdFunctions.timeZoneOffset);
+        funcTable.put("daytime", BuiltinClassAdFunctions.dayTime);
+        funcTable.put("getyear", BuiltinClassAdFunctions.getField);
+        funcTable.put("getmonth", BuiltinClassAdFunctions.getField);
+        funcTable.put("getdayofyear", BuiltinClassAdFunctions.getField);
+        funcTable.put("getdayofmonth", BuiltinClassAdFunctions.getField);
+        funcTable.put("getdayofweek", BuiltinClassAdFunctions.getField);
+        funcTable.put("getdays", BuiltinClassAdFunctions.getField);
+        funcTable.put("gethours", BuiltinClassAdFunctions.getField);
+        funcTable.put("getminutes", BuiltinClassAdFunctions.getField);
+        funcTable.put("getseconds", BuiltinClassAdFunctions.getField);
+        funcTable.put("splittime", BuiltinClassAdFunctions.splitTime);
+        funcTable.put("formattime", BuiltinClassAdFunctions.formatTime);
+        // string manipulation
+        funcTable.put("strcat", BuiltinClassAdFunctions.strCat);
+        funcTable.put("toupper", BuiltinClassAdFunctions.changeCase);
+        funcTable.put("tolower", BuiltinClassAdFunctions.changeCase);
+        funcTable.put("substr", BuiltinClassAdFunctions.subString);
+        funcTable.put("strcmp", BuiltinClassAdFunctions.compareString);
+        funcTable.put("stricmp", BuiltinClassAdFunctions.compareString);
+        // pattern matching (regular expressions)
+        funcTable.put("regexp", BuiltinClassAdFunctions.matchPattern);
+        funcTable.put("regexpmember", BuiltinClassAdFunctions.matchPatternMember);
+        funcTable.put("regexps", BuiltinClassAdFunctions.substPattern);
+        // conversion functions
+        funcTable.put("int", BuiltinClassAdFunctions.convInt);
+        funcTable.put("real", BuiltinClassAdFunctions.convReal);
+        funcTable.put("string", BuiltinClassAdFunctions.convString);
+        funcTable.put("bool", BuiltinClassAdFunctions.convBool);
+        funcTable.put("abstime", BuiltinClassAdFunctions.convTime);
+        funcTable.put("reltime", BuiltinClassAdFunctions.convTime);
+
+        // turn the contents of an expression into a string
+        // but *do not* evaluate it
+
+        funcTable.put("unparse", BuiltinClassAdFunctions.unparse);
+        // mathematical functions
+        funcTable.put("floor", BuiltinClassAdFunctions.doRound);
+        funcTable.put("ceil", BuiltinClassAdFunctions.doRound);
+        funcTable.put("ceiling", BuiltinClassAdFunctions.doRound);
+        funcTable.put("round", BuiltinClassAdFunctions.doRound);
+        funcTable.put("pow", BuiltinClassAdFunctions.doMath2);
+        funcTable.put("quantize", BuiltinClassAdFunctions.doMath2);
+        funcTable.put("random", BuiltinClassAdFunctions.random);
+
+        // for compatibility with old classads:
+        funcTable.put("ifthenelse", BuiltinClassAdFunctions.ifThenElse);
+        funcTable.put("interval", BuiltinClassAdFunctions.interval);
+        funcTable.put("eval", BuiltinClassAdFunctions.eval);
+
+        // string list functions:
+        // Note that many other string list functions are defined
+        // externally in the Condor classad compatibility layer.
+
+        funcTable.put("stringlistsintersect", BuiltinClassAdFunctions.stringListsIntersect);
+        funcTable.put("debug", BuiltinClassAdFunctions.debug);
+        initialized = true;
+    }
+
+    /**
+     * Returns true if the function expression points to a valid
+     * function in the ClassAd library.
+     */
+    public boolean functionIsDefined() {
+        return function != null;
+    }
+
+    public void copyFrom(FunctionCall copiedFrom) throws HyracksDataException {
+        this.function = copiedFrom.function;
+        this.functionName = copiedFrom.functionName;
+        if (this.arguments == null) {
+            this.arguments = (ExprList) copiedFrom.arguments.copy();
+        } else {
+            this.arguments.copyFrom(copiedFrom.arguments);
+        }
+    }
+
+    public FunctionCall() {
+        functionName = null;
+        function = null;
+        arguments = null;
+    }
+
+    public static FunctionCall createFunctionCall(String functionName, ExprList args) {
+        FunctionCall fc = new FunctionCall();
+        fc.function = funcTable.get(functionName.toLowerCase());
+        fc.functionName = functionName;
+        fc.arguments = args;
+        return fc;
+    }
+
+    // copy constructor
+
+    public FunctionCall(FunctionCall functioncall) throws HyracksDataException {
+        copyFrom(functioncall);
+    }
+
+    @Override
+    public ExprTree copy() throws HyracksDataException {
+        FunctionCall newTree = new FunctionCall();
+        newTree.copyFrom(this);
+        return newTree;
+    }
+
+    @Override
+    public void copyFrom(ExprTree tree) throws HyracksDataException {
+        FunctionCall functioncall = (FunctionCall) tree;
+        functionName = functioncall.functionName;
+        function = functioncall.function;
+        if (arguments == null) {
+            arguments = (ExprList) functioncall.arguments.copy();
+        } else {
+            arguments.copyFrom(functioncall.arguments);
+        }
+        super.copyFrom(functioncall);
+    }
+
+    @Override
+    public boolean sameAs(ExprTree tree) {
+        boolean is_same = false;
+        FunctionCall other_fn;
+        ExprTree pSelfTree = tree.self();
+
+        if (this == pSelfTree) {
+            is_same = true;
+        } else if (pSelfTree.getKind() != NodeKind.FN_CALL_NODE) {
+            is_same = false;
+        } else {
+            other_fn = (FunctionCall) pSelfTree;
+            if (functionName.equals(other_fn.functionName) && function.equals(other_fn.function)
+                    && arguments.equals(other_fn.arguments)) {
+                is_same = true;
+
+            } else {
+                is_same = false;
+            }
+        }
+        return is_same;
+    }
+
+    public boolean equals(FunctionCall fn) {
+        return sameAs(fn);
+    }
+
+    public static HashMap<String, ClassAdFunc> getFunctionTable() {
+        return funcTable;
+    }
+
+    public static synchronized void registerFunction(String functionName, ClassAdFunc function) {
+        if (!funcTable.containsKey(functionName)) {
+            funcTable.put(functionName, function);
+        }
+    }
+
+    @Override
+    public void privateSetParentScope(ClassAd parent) {
+        arguments.privateSetParentScope(parent);
+    }
+
+    // This copies references to the argument expressions (it does not clone them)
+    public void getComponents(AMutableString fn, ExprList exprList) {
+        fn.setValue(functionName);
+        for (ExprTree tree : arguments.getExprList()) {
+            exprList.add(tree);
+        }
+    }
+
+    public void getComponents(AMutableCharArrayString fn, ExprList exprList) {
+        fn.setValue(functionName);
+        for (ExprTree tree : arguments.getExprList()) {
+            exprList.add(tree);
+        }
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value value) throws HyracksDataException {
+        if (function != null) {
+            return function.call(functionName, arguments, state, value);
+        } else {
+            value.setErrorValue();
+            return (true);
+        }
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value value, ExprTreeHolder tree) throws HyracksDataException {
+        FunctionCall tmpSig = new FunctionCall();
+        Value tmpVal = new Value();
+        ExprTreeHolder argSig = new ExprTreeHolder();
+        MutableBoolean rval = new MutableBoolean();
+        if (!privateEvaluate(state, value)) {
+            return false;
+        }
+        tmpSig.functionName = functionName;
+        tmpSig.arguments = new ExprList(); // the default constructor leaves arguments null
+        rval.setValue(true);
+        for (ExprTree i : arguments.getExprList()) {
+            rval.setValue(i.publicEvaluate(state, tmpVal, argSig));
+            if (rval.booleanValue()) {
+                tmpSig.arguments.add(argSig.getInnerTree());
+            }
+        }
+        tree.setInnerTree(tmpSig);
+        return rval.booleanValue();
+    }
+
+    @Override
+    public boolean privateFlatten(EvalState state, Value value, ExprTreeHolder tree, AMutableInt32 i)
+            throws HyracksDataException {
+        FunctionCall newCall = new FunctionCall();
+        ExprTreeHolder argTree = new ExprTreeHolder();
+        Value argValue = new Value();
+        boolean fold = true;
+
+        tree.setInnerTree(null); // Just to be safe...  wenger 2003-12-11.
+
+        // if the function cannot be resolved, the value is "error"
+        if (function == null) {
+            value.setErrorValue();
+            return true;
+        }
+
+        newCall.functionName = functionName;
+        newCall.function = function;
+        newCall.arguments = new ExprList(); // the default constructor leaves arguments null
+
+        // flatten the arguments
+        for (ExprTree exp : arguments.getExprList()) {
+            if (exp.publicFlatten(state, argValue, argTree)) {
+                if (argTree.getInnerTree() != null) {
+                    newCall.arguments.add(argTree.getInnerTree());
+                    fold = false;
+                    continue;
+                } else {
+                    // Assert: argTree == NULL
+                    argTree.setInnerTree(Literal.createLiteral(argValue));
+                    if (argTree.getInnerTree() != null) {
+                        newCall.arguments.add(argTree.getInnerTree());
+                        continue;
+                    }
+                }
+            }
+
+            // we get here only when something bad happens
+            value.setErrorValue();
+            tree.setInnerTree(null);
+            return false;
+        }
+
+        // assume all functions are "pure" (i.e., side-effect free)
+        if (fold) {
+            // flattened to a value
+            if (!function.call(functionName, arguments, state, value)) {
+                return false;
+            }
+            tree.setInnerTree(null);
+        } else {
+            tree.setInnerTree(newCall);
+        }
+        return true;
+    }
+
+    @Override
+    public NodeKind getKind() {
+        return NodeKind.FN_CALL_NODE;
+    }
+
+    @Override
+    public void reset() {
+        this.arguments.clear();
+        this.function = null;
+        this.functionName = "";
+    }
+}
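
A registration sketch for the dispatch table above (not part of the patch; the alias and "someValue" are hypothetical): registerFunction() adds an entry only if the name is not already taken, and createFunctionCall() lowercases the name before lookup, so custom functions should be registered under lowercase keys.

    FunctionCall.registerFunction("istype", BuiltinClassAdFunctions.IsType); // reuse a builtin under an alias
    ExprList args = new ExprList();
    args.add(Literal.createLiteral(someValue));         // someValue: any previously built Value
    FunctionCall call = FunctionCall.createFunctionCall("IsType", args);     // resolved case-insensitively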

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/InputStreamLexerSource.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/InputStreamLexerSource.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/InputStreamLexerSource.java
new file mode 100644
index 0000000..38ab591
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/InputStreamLexerSource.java
@@ -0,0 +1,111 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+
+public class InputStreamLexerSource extends LexerSource {
+    // This source allows input from a stream. Note that
+    // the user passes in a reference to the stream.
+
+    public int position = 0;
+    public char[] buffer = new char[512];
+    private BufferedReader reader;
+    public int validBytes;
+
+    public InputStreamLexerSource(InputStream in) {
+        this.reader = new BufferedReader(new InputStreamReader(in));
+    }
+
+    @Override
+    public int getPosition() {
+        return position;
+    }
+
+    public InputStreamLexerSource() {
+    }
+
+    public InputStreamLexerSource(BufferedReader reader) {
+        this.reader = reader;
+    }
+
+    @Override
+    public char readCharacter() throws IOException {
+        if (position < validBytes) {
+            previousCharacter = buffer[position];
+            position++;
+            return previousCharacter;
+        } else {
+            fillBuffer();
+        }
+        if (position < validBytes) {
+            previousCharacter = buffer[position];
+            position++;
+            return previousCharacter;
+        }
+        return '\0';
+    }
+
+    private void fillBuffer() throws IOException {
+        position = 0;
+        // we leave an empty location at the end to take care of corner case of unread
+        int read = reader.read(buffer, 0, buffer.length - 1);
+        // read() returns -1 at end of stream; record zero valid bytes so atEnd() reports true
+        validBytes = read < 0 ? 0 : read;
+    }
+
+    @Override
+    public void unreadCharacter() {
+        if (position == 0) {
+            System.arraycopy(buffer, 0, buffer, 1, buffer.length - 1);
+            buffer[0] = previousCharacter;
+            validBytes++;
+            return;
+        } else {
+            position--;
+        }
+    }
+
+    @Override
+    public boolean atEnd() throws IOException {
+        if (position < validBytes) {
+            return false;
+        }
+        fillBuffer();
+        return position == validBytes;
+    }
+
+    public void setNewSource(InputStream stream) {
+        this.reader = new BufferedReader(new InputStreamReader(stream));
+        this.position = 0;
+        this.validBytes = 0;
+    }
+
+    public void setNewSource(BufferedReader reader) {
+        this.reader = reader;
+        this.position = 0;
+        this.validBytes = 0;
+    }
+
+    @Override
+    public char[] getBuffer() {
+        return buffer;
+    }
+}
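
And the stream-backed counterpart (sketch, not part of the patch; readCharacter()/atEnd() can throw IOException):

    byte[] data = "[ a = 1; b = 2 ]".getBytes(StandardCharsets.UTF_8);
    InputStreamLexerSource source = new InputStreamLexerSource(new ByteArrayInputStream(data));
    while (!source.atEnd()) {
        char c = source.readCharacter(); // buffered reads of up to 511 characters at a time
        // hand c to the lexer ...
    }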


[18/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
new file mode 100644
index 0000000..a155b33
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feed-with-external-parser/feed-with-external-parser.1.adm
@@ -0,0 +1,99 @@
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID
 : 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatch
 Time": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": 122, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "Condor
 Platform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "error", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) 
 ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "undefined", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-sur
 f-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "ssericksen@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 
 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": 122, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e272.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation
 _condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 10
 23 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( 
 JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicReleas
 e": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e286.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor
 /model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1
 023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=
 ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRele
 ase": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e457.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condo
 r/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( 
 ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partitio
 nableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,
 Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expect
 edMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_Jo
 bStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDra
 iningCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Resident
 SetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data
 /shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args":
  "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e445.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e372.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partit
 ionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBus
 y,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expe
 ctedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_
 JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulD
 rainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Reside
 ntSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Loca
 lUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_co
 ndor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d
 , "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 102
 3 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "exp
 r=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 26620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c064.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Sim
 ulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e423.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "
 expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "Peri
 odicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulat
 ion_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "exp
 r=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "Periodi
 cRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e442.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulatio
 n_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partit
 ionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBus
 y,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expe
 ctedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_
 JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulD
 rainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Reside
 ntSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Loca
 lUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_co
 ndor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d
 , "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133562, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_523030,ChtcWrapper333.out,AuditLog.333,simu_3_333.txt,harvest.log,333.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "
 expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25876, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/333/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25876.0d, "LastJobLeaseRenewal": 1446133562, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "333+333", "Peri
 odicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 157.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/333/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133562, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simula
 tion_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582154, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25876.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=333 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14242#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/333", "QDate": 1446105553, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582177.0#1446105581", "StatsLifetimeStarter": 25025, "JobStartDate": 1446108665, "SubmitEventNotes": "DAG Node: 145+145", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.57", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108665, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25026.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133691, "ResidentSetSize_RAW": 73308, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 102
 3 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24770.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 120972, "BytesSent": 28290.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446133691, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 146, "SpooledOutputFiles": "CURLTIME_4179033,ChtcWrapper145.out,AuditLog.145,simu_3_145.txt,harvest.log,145.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108666, "ExitBySignal": false, "LastMatchTime": 1446108665, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 796, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "e
 xpr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 28476, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25026, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/145/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25026.0d, "LastJobLeaseRenewal": 1446133691, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "145+145", "P
 eriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c038.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 217.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/145/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133691, "StreamErr": false, "RecentBlockReadKbytes": 1932, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/
 Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582177, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25026.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=145 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.57:49793>#1445322694#1541#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/145", "QDate": 1446105581, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582178.0#1446105581", "StatsLifetimeStarter": 24871, "JobStartDate": 1446108666, "SubmitEventNotes": "DAG Node: 154+154", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.158", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446108666, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24874.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133540, "ResidentSetSize_RAW": 125792, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24626.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30559.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133540, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1382128,ChtcWrapper154.out,AuditLog.154,simu_3_154.txt,harvest.log,154.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446108668, "ExitBySignal": false, "LastMatchTime": 1446108666, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime"

<TRUNCATED>


[31/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/data/external-parser/jobads.new
----------------------------------------------------------------------
diff --git a/asterix-app/data/external-parser/jobads.new b/asterix-app/data/external-parser/jobads.new
new file mode 100644
index 0000000..2ca4919
--- /dev/null
+++ b/asterix-app/data/external-parser/jobads.new
@@ -0,0 +1,12869 @@
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446112223; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.179100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2696692,ChtcWrapper159.out,AuditLog.159,simu_3_159.txt,harvest.log,159.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.195400000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/159"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134176; 
+        LastMatchTime = 1446112222; 
+        LastJobLeaseRenewal = 1446134176; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582557; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=159 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134177; 
+        QDate = 1446105741; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/159/process.log"; 
+        JobCurrentStartDate = 1446112222; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "159+159"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 21954; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.152"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134176; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446112222; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582557.0#1446105741"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.195400000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e352.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/159/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125604; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.152:39021>#1444772294#9281#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 159+159"; 
+        CumulativeSlotTime = 2.195400000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 21953; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446111648; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.235300000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_818403,ChtcWrapper211.out,AuditLog.211,simu_3_211.txt,harvest.log,211.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.252000000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.060300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134167; 
+        QDate = 1446105734; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134167; 
+        LastMatchTime = 1446111647; 
+        LastJobLeaseRenewal = 1446134167; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582533; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211/process.log"; 
+        JobCurrentStartDate = 1446111647; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=211 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "211+211"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 22520; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.61"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134167; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446111647; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582533.0#1446105734"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.252000000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e261.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/211/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 126608; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.61:49736>#1444759807#6759#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 211+211"; 
+        CumulativeSlotTime = 2.252000000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 22519; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446134109; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.400000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,CURLTIME_137795,ChtcWrapper11021.out,R2011b_INFO,AuditLog.11021,SLIBS2.tar.gz,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5124; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727270000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        RecentStatsLifetimeStarter = 48; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134165; 
+        QDate = 1446134012; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134165; 
+        LastMatchTime = 1446134107; 
+        LastJobLeaseRenewal = 1446134165; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49584018; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/11021/process.log"; 
+        JobCurrentStartDate = 1446134107; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=11021 --"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "11021+11021"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.39"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134165; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446134107; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49584018.0#1446134012"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 14; 
+        LastRemoteHost = "slot1@e239.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/11021/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5124; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.39:54850>#1445038698#5043#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 11021+11021"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 14; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1139127; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 56; 
+        ImageSize = 7500; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        WantGlidein = true; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/11021"
+    ]
+
+    [
+        BlockWrites = 4; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446108996; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.477600000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 100000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,ChtcWrapper407.out,AuditLog.407,CURLTIME_1861323,407.out,simu_3_407.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 123648; 
+        RemoteWallClockTime = 2.513300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastRejMatchReason = "PREEMPTION_REQUIREMENTS == False "; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 3976; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 30280; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/407"; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134128; 
+        LastMatchTime = 1446108995; 
+        LastJobLeaseRenewal = 1446134128; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582261; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=407 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134128; 
+        QDate = 1446105631; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/407/process.log"; 
+        JobCurrentStartDate = 1446108995; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "407+407"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 25133; 
+        AutoClusterId = 38210; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 16; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134128; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446108995; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582261.0#1446105631"; 
+        RemoteSysCpu = 2.770000000000000E+02; 
+        LastRejMatchTime = 1446108994; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.513300000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 906; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/407/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 76112; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1604#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 407+407"; 
+        CumulativeSlotTime = 2.513300000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 313; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 25132; 
+        ImageSize = 125000
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446121054; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.293400000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_37424,ChtcWrapper409.out,AuditLog.409,simu_3_409.txt,harvest.log,409.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.305100000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.787300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134104; 
+        LastMatchTime = 1446121053; 
+        LastJobLeaseRenewal = 1446134104; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49583239; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=409 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134104; 
+        QDate = 1446106003; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409/process.log"; 
+        JobCurrentStartDate = 1446121053; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "409+409"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 13051; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.242"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134104; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446121053; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583239.0#1446106003"; 
+        RemoteSysCpu = 9.300000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.305100000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e442.chtc.WISC.EDU"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/409/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 127216; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.242:38884>#1443991450#10456#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 409+409"; 
+        CumulativeSlotTime = 1.305100000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 13050; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1445943853; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.852360000000000E+05; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.843670000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.902470000000000E+05; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.076600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134099; 
+        QDate = 1445938922; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134099; 
+        LastMatchTime = 1445943852; 
+        LastJobLeaseRenewal = 1446134099; 
+        DAGManNodesLog = "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49573720; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally/Simulation_condor/model_3/180/process.log"; 
+        JobCurrentStartDate = 1445943852; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "180+180"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 190247; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.72"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49572657; 
+        EnteredCurrentStatus = 1446134099; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1445943852; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49573720.0#1445938922"; 
+        RemoteSysCpu = 1.835000000000000E+03; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.902470000000000E+05; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e272.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 123680; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.72:29075>#1444753997#6000#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 180+180"; 
+        CumulativeSlotTime = 1.902470000000000E+05; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 190245; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally/Simulation_condor/model_3/180"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446114726; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.908100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 75000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,232.out,ChtcWrapper232.out,AuditLog.232,CURLTIME_1864147,simu_3_232.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 118772; 
+        RemoteWallClockTime = 1.933800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.791100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 12; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 26436; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134062; 
+        QDate = 1446105779; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134062; 
+        LastMatchTime = 1446114724; 
+        LastJobLeaseRenewal = 1446134062; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582659; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232/process.log"; 
+        JobCurrentStartDate = 1446114724; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=232 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "232+232"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 19338; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134062; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446114724; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582659.0#1446105779"; 
+        RemoteSysCpu = 1.790000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.933800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 615; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/232/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 71268; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1612#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 232+232"; 
+        CumulativeSlotTime = 1.933800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 3; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216668; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 19336; 
+        ImageSize = 125000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.200000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "R2011b_INFO,CODEBLOWUP,AuditLog.10012,SLIBS2.tar.gz,ChtcWrapper10012.out,CURLTIME_2575055,chtcinnerwrapper"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5128; 
+        RemoteWallClockTime = 7.700000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727355000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/10012"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 67; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134040; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134040; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583905; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=10012 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134040; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/10012/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133888; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "10012+10012"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 77; 
+        AutoClusterId = 38267; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.69"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134040; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583905.0#1446133888"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 7.700000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 12; 
+        LastRemoteHost = "slot1_2@e189.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/10012/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5128; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.69:4177>#1444973293#3769#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 10012+10012"; 
+        CumulativeSlotTime = 7.700000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 12; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1211433; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 76; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115779; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.811800000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.847170000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3140097,ChtcWrapper3.out,AuditLog.3,simu_3_3.txt,harvest.log,3.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.824800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.789600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134026; 
+        QDate = 1446105835; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134026; 
+        LastMatchTime = 1446115778; 
+        LastJobLeaseRenewal = 1446134026; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582786; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3/process.log"; 
+        JobCurrentStartDate = 1446115778; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=3 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "3+3"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 18248; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.107"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582778; 
+        EnteredCurrentStatus = 1446134026; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446115778; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582786.0#1446105835"; 
+        RemoteSysCpu = 1.080000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.824800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e307.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.46/Simulation_condor/data/3/,/home/xguo23/model_3_1.46/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125940; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.107:63744>#1444685448#11070#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 3+3"; 
+        CumulativeSlotTime = 1.824800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 18247; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.46/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3"
+    ]
+
+    [
+        BlockWrites = 506; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.100000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,SLIBS2.tar.gz,R2011b_INFO,AuditLog.20111,CURLTIME_1051736,ChtcWrapper20111.out,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5056; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727274000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 164; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 164; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/20111"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 43; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 506; 
+        CompletionDate = 1446134021; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134021; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583938; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=20111 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134021; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/20111/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133922; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "20111+20111"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        AutoClusterId = 38259; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.37"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 249656; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134021; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 249656; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583938.0#1446133922"; 
+        RemoteSysCpu = 7.000000000000000E+00; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 16; 
+        LastRemoteHost = "slot1_10@e168.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/20111/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5056; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.37:57713>#1445396629#2313#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 20111+20111"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 16; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1205568; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 52; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115115; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.878200000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2890029,ChtcWrapper260.out,AuditLog.260,simu_3_260.txt,harvest.log,260.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.890300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.050700000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134017; 
+        QDate = 1446105803; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134017; 
+        LastMatchTime = 1446115114; 
+        LastJobLeaseRenewal = 1446134017; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582724; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/260/process.log"; 
+        JobCurrentStartDate = 1446115114; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=260 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "260+260"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 

<TRUNCATED>


[15/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java
new file mode 100644
index 0000000..97ebd6c
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/BuiltinClassAdFunctions.java
@@ -0,0 +1,1927 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.io.IOException;
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.ArrayList;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Random;
+import java.util.Set;
+import java.util.TimeZone;
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+import org.apache.asterix.external.classad.ExprTree.NodeKind;
+import org.apache.asterix.external.classad.Value.ValueType;
+import org.apache.asterix.external.library.ClassAdParser;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class BuiltinClassAdFunctions {
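+    // Note on the calling convention shared by the ClassAdFunc instances below (a
+    // summary of the code in this file, not additional behavior): call() receives the
+    // invoked function name, the argument list, the evaluation state, and an output
+    // Value. Wrong argument counts or argument types are reported by setting ERROR
+    // (or UNDEFINED) on "val" and returning true; false is returned only when
+    // evaluating an argument itself fails.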
+
+    public static final ClassAdFunc IsType = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            // need a single argument
+            if (argList.size() != 1) {
+                val.setErrorValue();
+                return true;
+            }
+            // Evaluate the argument
+            if (!argList.getExprList().get(0).publicEvaluate(state, val)) {
+                val.setErrorValue();
+                return false;
+            }
+            // check if the value was of the required type
+            switch (name.toLowerCase()) {
+                case "isundefined":
+                    val.setBooleanValue(val.isUndefinedValue());
+                    break;
+                case "iserror":
+                    val.setBooleanValue(val.isErrorValue());
+                    break;
+                case "isinteger":
+                    val.setBooleanValue(val.isIntegerValue());
+                    break;
+                case "isstring":
+                    val.setBooleanValue(val.isStringValue());
+                    break;
+                case "isreal":
+                    val.setBooleanValue(val.isRealValue());
+                    break;
+                case "isboolean":
+                    val.setBooleanValue(val.isBooleanValue());
+                    break;
+                case "isclassad":
+                    val.setBooleanValue(val.isClassAdValue());
+                    break;
+                case "islist":
+                    val.setBooleanValue(val.isListValue());
+                    break;
+                case "isabstime":
+                    val.setBooleanValue(val.isAbsoluteTimeValue());
+                    break;
+                case "isreltime":
+                    val.setBooleanValue(val.isRelativeTimeValue());
+                    break;
+                default:
+                    val.setErrorValue();
+            }
+            return (true);
+        }
+    };
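+    // Illustrative evaluations of the type predicates handled by IsType above
+    // (examples only, assuming standard ClassAd evaluation semantics):
+    //   isInteger(10)          -> true
+    //   isString("foo")        -> true
+    //   isReal(10)             -> false
+    //   isUndefined(undefined) -> true
+    //   isBoolean(true, false) -> ERROR   (exactly one argument is required)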
+    public static final ClassAdFunc TestMember = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            Value arg0 = new Value();
+            Value arg1 = new Value();
+            Value cArg = new Value();
+
+            ExprList el = new ExprList();
+            MutableBoolean b = new MutableBoolean();
+            boolean useIS = name.equalsIgnoreCase("identicalmember");
+
+            // need two arguments
+            if (argList.size() != 2) {
+                val.setErrorValue();
+                return (true);
+            }
+
+            // Evaluate the arg list
+            if (!argList.get(0).publicEvaluate(state, arg0) || !argList.get(1).publicEvaluate(state, arg1)) {
+                val.setErrorValue();
+                return false;
+            }
+
+            // if the second arg (a list) is undefined, or the first arg is
+            // undefined and we're supposed to test for strict comparison, the
+            // result is 'undefined'
+            if (arg1.isUndefinedValue() || (!useIS && arg0.isUndefinedValue())) {
+                val.setUndefinedValue();
+                return true;
+            }
+
+            // Swap the arguments so that the list ends up in arg1
+            if (arg0.isListValue() && !arg1.isListValue()) {
+                Value swap = new Value();
+                swap.copyFrom(arg0);
+                arg0.copyFrom(arg1);
+                arg1.copyFrom(swap);
+            }
+
+            // arg1 must be a list; arg0 must be comparable
+            if (!arg1.isListValue() || arg0.isListValue() || arg0.isClassAdValue()) {
+                val.setErrorValue();
+                return true;
+            }
+
+            // if we're using strict comparison, arg0 can't be 'error'
+            if (!useIS && arg0.isErrorValue()) {
+                val.setErrorValue();
+                return (true);
+            }
+
+            // check for membership
+            arg1.isListValue(el);
+            for (ExprTree tree : el.getExprList()) {
+                if (!tree.publicEvaluate(state, cArg)) {
+                    val.setErrorValue();
+                    return (false);
+                }
+                Operation.operate(useIS ? Operation.OpKind_IS_OP : Operation.OpKind_EQUAL_OP, cArg, arg0, val);
+                if (val.isBooleanValue(b) && b.booleanValue()) {
+                    return true;
+                }
+            }
+            val.setBooleanValue(false);
+            return true;
+        }
+    };
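+    // Illustrative examples for TestMember above: member(1, {1, 2, 3}) -> true,
+    // member(4, {1, 2, 3}) -> false, member(undefined, {1, 2}) -> undefined;
+    // identicalMember uses the strict "is" comparison instead of "==".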
+    public static final ClassAdFunc Size = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            Value arg = new Value();
+            ExprList listToSize = new ExprList();
+            ClassAd classadToSize = new ClassAd();
+            AMutableInt32 length = new AMutableInt32(0);
+            // we accept only one argument
+            if (argList.size() != 1) {
+                val.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                val.setErrorValue();
+                return false;
+            } else if (arg.isUndefinedValue()) {
+                val.setUndefinedValue();
+                return true;
+            } else if (arg.isListValue(listToSize)) {
+                val.setIntegerValue(listToSize.size());
+                return true;
+            } else if (arg.isClassAdValue(classadToSize)) {
+                val.setIntegerValue(classadToSize.size());
+                return true;
+            } else if (arg.isStringValue(length)) {
+                val.setIntegerValue(length.getIntegerValue().intValue());
+                return true;
+            } else {
+                val.setErrorValue();
+                return true;
+            }
+        }
+    };
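+    // Illustrative examples for Size above: size("hello") -> 5, size({1, 2, 3}) -> 3,
+    // size([a = 1; b = 2]) -> 2; any other argument type yields the error value.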
+    public static final ClassAdFunc SumAvg = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+
+            Value listElementValue = new Value();
+            Value listVal = new Value();
+            Value numElements = new Value();
+            Value result = new Value();
+            ExprList listToSum = new ExprList();
+            MutableBoolean first = new MutableBoolean();
+            AMutableInt64 len = new AMutableInt64(0);
+            boolean onlySum = name.equalsIgnoreCase("sum");
+
+            // we accept only one argument
+            if (argList.size() != 1) {
+                val.setErrorValue();
+                return (true);
+            }
+
+            // the argument must evaluate to a list
+            if (!argList.get(0).publicEvaluate(state, listVal)) {
+                val.setErrorValue();
+                return false;
+            } else if (listVal.isUndefinedValue()) {
+                val.setUndefinedValue();
+                return true;
+            } else if (!listVal.isListValue(listToSum)) {
+                val.setErrorValue();
+                return (true);
+            }
+
+            result.setUndefinedValue();
+            len.setValue(0);
+            first.setValue(true);
+
+            // Walk over each element in the list, and sum.
+            for (ExprTree listElement : listToSum.getExprList()) {
+                len.setValue(len.getLongValue() + 1);
+                // Make sure this element is a number.
+                if (!listElement.publicEvaluate(state, listElementValue)) {
+                    val.setErrorValue();
+                    return false;
+                } else if (!listElementValue.isRealValue() && !listElementValue.isIntegerValue()) {
+                    val.setErrorValue();
+                    return true;
+                }
+
+                // Either take the number if it's the first,
+                // or add to the running sum.
+                if (first.booleanValue()) {
+                    result.copyFrom(listElementValue);
+                    first.setValue(false);
+                } else {
+                    Operation.operate(Operation.OpKind_ADDITION_OP, result, listElementValue, result);
+                }
+
+            }
+
+            // if the sum() function was called, we don't need to find the average
+            if (onlySum) {
+                val.copyFrom(result);
+                return true;
+            }
+
+            if (len.getLongValue() > 0) {
+                numElements.setRealValue(len.getLongValue());
+                Operation.operate(Operation.OpKind_DIVISION_OP, result, numElements, result);
+            } else {
+                val.setUndefinedValue();
+            }
+
+            val.copyFrom(result);
+            return true;
+        }
+
+    };
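+    // Illustrative examples for SumAvg above: sum({1, 2, 3}) -> 6 and avg({1, 2, 3}) -> 2.0;
+    // an empty list yields undefined and a non-numeric element yields the error value.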
+    public static final ClassAdFunc MinMax = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String fn, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            Value listElementValue = new Value();
+            Value listVal = new Value();
+            Value cmp = new Value();
+            Value result = new Value();
+            ExprList listToBound = new ExprList();
+            boolean first = true;
+            MutableBoolean b = new MutableBoolean(false);
+            int comparisonOperator;
+
+            // we accept only one argument
+            if (argList.size() != 1) {
+                val.setErrorValue();
+                return true;
+            }
+
+            // the first argument must evaluate to a list
+            if (!argList.get(0).publicEvaluate(state, listVal)) {
+                val.setErrorValue();
+                return false;
+            } else if (listVal.isUndefinedValue()) {
+                val.setUndefinedValue();
+                return true;
+            } else if (!listVal.isListValue(listToBound)) {
+                val.setErrorValue();
+                return true;
+            }
+
+            // fn is either "min..." or "max..."
+            if (Character.toLowerCase(fn.charAt(1)) == 'i') {
+                comparisonOperator = Operation.OpKind_LESS_THAN_OP;
+            } else {
+                comparisonOperator = Operation.OpKind_GREATER_THAN_OP;
+            }
+
+            result.setUndefinedValue();
+
+            // Walk over the list, calculating the bound the whole way.
+            for (ExprTree listElement : listToBound.getExprList()) {
+                // For this element of the list, make sure it is
+                // acceptable.
+                if (!listElement.publicEvaluate(state, listElementValue)) {
+                    val.setErrorValue();
+                    return false;
+                } else if (!listElementValue.isRealValue() && !listElementValue.isIntegerValue()) {
+                    val.setErrorValue();
+                    return true;
+                }
+
+                // If it's the first element, copy it to the bound,
+                // otherwise compare to decide what to do.
+                if (first) {
+                    result.copyFrom(listElementValue);
+                    first = false;
+                } else {
+                    Operation.operate(comparisonOperator, listElementValue, result, cmp);
+                    if (cmp.isBooleanValue(b) && b.booleanValue()) {
+                        result.copyFrom(listElementValue);
+                    }
+                }
+            }
+
+            val.copyFrom(result);
+            return true;
+        }
+
+    };
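+    // Illustrative examples for MinMax above: min({3, 1.5, 2}) -> 1.5 and max({3, 1.5, 2}) -> 3;
+    // an empty list yields undefined and a non-numeric element yields the error value.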
+    public static final ClassAdFunc ListCompare = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String fn, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+
+            Value listElementValue = new Value();
+            Value listVal = new Value();
+            Value compareVal = new Value();
+            Value stringValue = new Value();
+            ExprList listToCompare = new ExprList();
+            boolean needAllMatch;
+            AMutableCharArrayString comparison_string = new AMutableCharArrayString();
+            int comparisonOperator;
+
+            // We take three arguments:
+            // The operator to use, as a string.
+            // The list
+            // The thing we are comparing against.
+            if (argList.size() != 3) {
+                val.setErrorValue();
+                return true;
+            }
+
+            // The first argument must be a string
+            if (!argList.get(0).publicEvaluate(state, stringValue)) {
+                val.setErrorValue();
+                return false;
+            } else if (stringValue.isUndefinedValue()) {
+                val.setUndefinedValue();
+                return true;
+            } else if (!stringValue.isStringValue(comparison_string)) {
+                val.setErrorValue();
+                return true;
+            }
+
+            // Decide which comparison to do, or give an error
+            switch (comparison_string.toString()) {
+                case "<":
+                    comparisonOperator = Operation.OpKind_LESS_THAN_OP;
+                    break;
+                case "<=":
+                    comparisonOperator = Operation.OpKind_LESS_OR_EQUAL_OP;
+                    break;
+                case "!=":
+                    comparisonOperator = Operation.OpKind_NOT_EQUAL_OP;
+                    break;
+                case "==":
+                    comparisonOperator = Operation.OpKind_EQUAL_OP;
+                    break;
+                case ">":
+                    comparisonOperator = Operation.OpKind_GREATER_THAN_OP;
+                    break;
+                case ">=":
+                    comparisonOperator = Operation.OpKind_GREATER_OR_EQUAL_OP;
+                    break;
+                case "is":
+                    comparisonOperator = Operation.OpKind_META_EQUAL_OP;
+                    break;
+                case "isnt":
+                    comparisonOperator = Operation.OpKind_META_NOT_EQUAL_OP;
+                    break;
+                default:
+                    val.setErrorValue();
+                    return true;
+            }
+
+            // The second argument must evaluate to a list
+            if (!argList.get(1).publicEvaluate(state, listVal)) {
+                val.setErrorValue();
+                return false;
+            } else if (listVal.isUndefinedValue()) {
+                val.setUndefinedValue();
+                return true;
+            } else if (!listVal.isListValue(listToCompare)) {
+                val.setErrorValue();
+                return true;
+            }
+
+            // The third argument is something to compare against.
+            if (!argList.get(2).publicEvaluate(state, compareVal)) {
+                val.setErrorValue();
+                return false;
+            } else if (compareVal.isUndefinedValue()) {
+                val.setUndefinedValue();
+                return true;
+            }
+
+            // Finally, we decide what to do exactly, based on our name.
+            if (fn.equalsIgnoreCase("anycompare")) {
+                needAllMatch = false;
+                val.setBooleanValue(false);
+            } else {
+                needAllMatch = true;
+                val.setBooleanValue(true);
+            }
+
+            // Walk over the list
+            for (ExprTree listElement : listToCompare.getExprList()) {
+                // For this element of the list, make sure it is
+                // acceptable.
+                if (!listElement.publicEvaluate(state, listElementValue)) {
+                    val.setErrorValue();
+                    return false;
+                } else {
+                    Value compareResult = new Value();
+                    MutableBoolean b = new MutableBoolean();
+
+                    Operation.operate(comparisonOperator, listElementValue, compareVal, compareResult);
+                    if (!compareResult.isBooleanValue(b)) {
+                        if (compareResult.isUndefinedValue()) {
+                            if (needAllMatch) {
+                                val.setBooleanValue(false);
+                                return true;
+                            }
+                        } else {
+                            val.setErrorValue();
+                            return true;
+                        }
+                        return true;
+                    } else if (b.booleanValue()) {
+                        if (!needAllMatch) {
+                            val.setBooleanValue(true);
+                            return true;
+                        }
+                    } else {
+                        if (needAllMatch) {
+                            // we failed, because it didn't match
+                            val.setBooleanValue(false);
+                            return true;
+                        }
+                    }
+                }
+            }
+
+            if (needAllMatch) {
+                // They must have all matched, because nothing failed,
+                // which would have returned.
+                val.setBooleanValue(true);
+            } else {
+                // Nothing must have matched, since we would have already
+                // returned.
+                val.setBooleanValue(false);
+            }
+
+            return true;
+        }
+
+    };
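+    // Illustrative examples for ListCompare above: anyCompare("<", {1, 2, 3}, 2) -> true
+    // (because 1 < 2) while allCompare("<", {1, 2, 3}, 2) -> false; each list element is
+    // placed on the left-hand side of the requested comparison operator.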
+    public static final ClassAdFunc timeZoneOffset = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            // no arguments
+            if (argList.size() > 0) {
+                val.setErrorValue();
+                return (true);
+            }
+            val.setRelativeTimeValue(new ClassAdTime());
+            return (true);
+        }
+    };
+    public static final ClassAdFunc debug = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            return false;
+        }
+    };
+    public static final ClassAdFunc formatTime = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value time_arg = new Value();
+            Value format_arg = new Value();
+            AMutableInt64 int64 = new AMutableInt64(0);
+            ClassAdTime epoch_time = new ClassAdTime();
+            ClassAdTime time_components = new ClassAdTime("GMT");
+            ClassAd splitClassAd = new ClassAd();
+            String format;
+            int number_of_args;
+            boolean did_eval;
+            did_eval = true;
+            number_of_args = argList.size();
+            if (number_of_args == 0) {
+                epoch_time.setEpochTime();
+                Util.getLocalTime(epoch_time, time_components);
+                format = "%c";
+                make_formatted_time(time_components, format, result);
+            } else if (number_of_args < 3) {
+                // The first argument should be our time and should
+                // not be a relative time.
+                if (!argList.get(0).publicEvaluate(state, time_arg)) {
+                    did_eval = false;
+                } else if (time_arg.isRelativeTimeValue()) {
+                    result.setErrorValue();
+                } else if (time_arg.isAbsoluteTimeValue(time_components)) {
+                } else if (!time_arg.isClassAdValue(splitClassAd) /*doSplitTime(time_arg, splitClassAd)*/) {
+                    result.setErrorValue();
+                } else {
+                    if (!splitClassAd.evaluateAttrInt("Seconds", int64)) {
+                        time_components.setSeconds(0);
+                    } else {
+                        time_components.setSeconds((int) int64.getLongValue());
+                    }
+                    if (!splitClassAd.evaluateAttrInt("Minutes", int64)) {
+                        time_components.setMinutes(0);
+                    } else {
+                        time_components.setMinutes((int) int64.getLongValue());
+                    }
+                    if (!splitClassAd.evaluateAttrInt("Hours", int64)) {
+                        time_components.setHours(0);
+                    } else {
+                        time_components.setHours((int) int64.getLongValue());
+                    }
+                    if (!splitClassAd.evaluateAttrInt("Day", int64)) {
+                        time_components.setDayOfMonth(0);
+                    } else {
+                        time_components.setDayOfMonth((int) int64.getLongValue());
+                    }
+                    if (!splitClassAd.evaluateAttrInt("Month", int64)) {
+                        time_components.setMonth(0);
+                    } else {
+                        time_components.setMonth((int) int64.getLongValue() - 1);
+                    }
+                    if (!splitClassAd.evaluateAttrInt("Year", int64)) {
+                        time_components.setYear(0);
+                    } else {
+                        time_components.setYear((int) int64.getLongValue());
+                    }
+                }
+
+                // The second argument, if provided, must be a string
+                if (number_of_args == 1) {
+                    format = "EEE MMM dd HH:mm:ss yyyy";
+                    make_formatted_time(time_components, format, result);
+                } else {
+                    if (!argList.get(1).publicEvaluate(state, format_arg)) {
+                        did_eval = false;
+                    } else {
+                        AMutableCharArrayString formatString = new AMutableCharArrayString();
+                        if (!format_arg.isStringValue(formatString)) {
+                            result.setErrorValue();
+                        } else {
+                            make_formatted_time(time_components, formatString.toString(), result);
+                        }
+                    }
+                }
+            } else {
+                result.setErrorValue();
+            }
+            if (!did_eval) {
+                result.setErrorValue();
+            }
+            return did_eval;
+        }
+    };
+
+    public static void make_formatted_time(ClassAdTime time_components, String format, Value result) {
+        // map C strftime-style format specifiers onto Java SimpleDateFormat patterns
+        format = format.replace("%m", "MM");
+        format = format.replace("%d", "dd");
+        format = format.replace("%Y", "yyyy");
+        format = format.replace("%M", "mm");
+        format = format.replace("%S", "ss");
+        format = format.replace("%A", "EEEE");
+        format = format.replace("%a", "EEE");
+        format = format.replace("%B", "MMMM");
+        format = format.replace("%b", "MMM");
+        format = format.replace("%H", "HH");
+        format = format.replace("%", "");
+        DateFormat df = new SimpleDateFormat(format);
+        df.setTimeZone(TimeZone.getTimeZone("GMT"));
+        result.setStringValue(df.format(time_components.getCalendar().getTime()));
+    }
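+    // Example for make_formatted_time above: the strftime-style format "%Y-%m-%d %H:%M:%S"
+    // is rewritten to the SimpleDateFormat pattern "yyyy-MM-dd HH:mm:ss" before the time
+    // components are formatted in GMT.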
+
+    public static final ClassAdFunc getField = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            Value arg = new Value();
+            ClassAdTime asecs = new ClassAdTime();
+            ClassAdTime rsecs = new ClassAdTime();
+            ClassAdTime clock = new ClassAdTime();
+            ClassAdTime tms = new ClassAdTime();
+
+            if (argList.size() != 1) {
+                val.setErrorValue();
+                return (true);
+            }
+
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                val.setErrorValue();
+                return false;
+            }
+            if (arg.isAbsoluteTimeValue(asecs)) {
+                clock.setValue(asecs);
+                Util.getLocalTime(clock, tms);
+                switch (name) {
+
+                    case "getyear":
+                        // ClassAdTime.getYear() already returns the full year
+                        val.setIntegerValue(tms.getYear());
+                        break;
+                    case "getmonth":
+                        val.setIntegerValue(tms.getMonth() + 1);
+                        break;
+                    case "getdayofyear":
+                        val.setIntegerValue(tms.getDayOfYear());
+                        break;
+                    case "getdayofmonth":
+                        val.setIntegerValue(tms.getDayOfMonth());
+                        break;
+                    case "getdayofweek":
+                        val.setIntegerValue(tms.getDayOfWeek());
+                        break;
+                    case "gethours":
+                        val.setIntegerValue(tms.getHours());
+                        break;
+                    case "getminutes":
+                        val.setIntegerValue(tms.getMinutes());
+                        break;
+                    case "getseconds":
+                        val.setIntegerValue(tms.getSeconds());
+                        break;
+                    case "getdays":
+                    case "getuseconds":
+                        // not meaningful for abstimes
+                        val.setErrorValue();
+                        return true;
+                    default:
+                        throw new HyracksDataException("Should not reach here");
+                }
+                return (true);
+            } else if (arg.isRelativeTimeValue(rsecs)) {
+                switch (name.toLowerCase()) {
+                    case "getyear":
+                    case "getmonth":
+                    case "getdayofmonth":
+                    case "getdayofweek":
+                    case "getdayofyear":
+                        // not meaningful for reltimes
+                        val.setErrorValue();
+                        return true;
+                    case "getdays":
+                        val.setIntegerValue(rsecs.getRelativeTime() / 86400);
+                        break;
+                    case "gethours":
+                        val.setIntegerValue((rsecs.getRelativeTime() % 86400) / 3600);
+                        break;
+                    case "getminutes":
+                        val.setIntegerValue((rsecs.getRelativeTime() % 3600) / 60);
+                        break;
+                    case "getseconds":
+                        val.setIntegerValue(rsecs.getRelativeTime() % 60);
+                        break;
+                    default:
+                        throw new HyracksDataException("Should not reach here");
+                }
+                return true;
+            }
+
+            val.setErrorValue();
+            return (true);
+        }
+
+    };
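+    // Illustrative examples for getField above: for an absolute time the calendar fields
+    // (getYear, getMonth, getDayOfMonth, getHours, ...) are returned while getDays/getUseconds
+    // are errors; for a relative time the calendar fields are errors and, assuming
+    // getRelativeTime() yields seconds, 90061 splits into getDays() -> 1, getHours() -> 1,
+    // getMinutes() -> 1, getSeconds() -> 1.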
+    public static final ClassAdFunc currentTime = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            // no arguments
+            if (argList.size() > 0) {
+                val.setErrorValue();
+                return (true);
+            }
+            Literal time_literal = Literal.createAbsTime(new ClassAdTime());
+            time_literal.GetValue(val);
+            return true;
+        }
+    };
+    public static final ClassAdFunc splitTime = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            ClassAd split = new ClassAd();
+
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return true;
+            }
+
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return false;
+            }
+
+            if (!arg.isClassAdValue() && doSplitTime(arg, split)) {
+                result.setClassAdValue(split);
+            } else {
+                result.setErrorValue();
+            }
+            return true;
+        }
+
+    };
+
+    public static boolean doSplitTime(Value time, ClassAd splitClassAd) throws HyracksDataException {
+        boolean did_conversion;
+        AMutableInt64 integer = new AMutableInt64(0);
+        AMutableDouble real = new AMutableDouble(0);
+        ClassAdTime asecs = new ClassAdTime();
+        ClassAdTime rsecs = new ClassAdTime();
+        ClassAd classad = new ClassAd();
+        did_conversion = true;
+        if (time.isIntegerValue(integer)) {
+            asecs.setValue(integer.getLongValue());
+            asecs.makeLocalAbsolute();
+            absTimeToClassAd(asecs, splitClassAd);
+        } else if (time.isRealValue(real)) {
+            asecs.setValue((long) real.getDoubleValue());
+            asecs.makeAbsolute(true);
+            absTimeToClassAd(asecs, splitClassAd);
+        } else if (time.isAbsoluteTimeValue(asecs)) {
+            absTimeToClassAd(asecs, splitClassAd);
+        } else if (time.isRelativeTimeValue(rsecs)) {
+            relTimeToClassAd((rsecs.getRelativeTime() / 1000.0), splitClassAd);
+        } else if (time.isClassAdValue(classad)) {
+            // copy into the caller's classad rather than into a new instance
+            splitClassAd.copyFrom(classad);
+        } else {
+            did_conversion = false;
+        }
+        return did_conversion;
+    }
+
+    public static void relTimeToClassAd(double rsecs, ClassAd splitClassAd) throws HyracksDataException {
+        int days, hrs, mins;
+        double secs;
+        boolean is_negative;
+
+        if (rsecs < 0) {
+            rsecs = -rsecs;
+            is_negative = true;
+        } else {
+            is_negative = false;
+        }
+        days = (int) rsecs;
+        hrs = days % 86400;
+        mins = hrs % 3600;
+        secs = (mins % 60) + (rsecs - Math.floor(rsecs));
+        days = days / 86400;
+        hrs = hrs / 3600;
+        mins = mins / 60;
+        if (is_negative) {
+            if (days > 0) {
+                days = -days;
+            } else if (hrs > 0) {
+                hrs = -hrs;
+            } else if (mins > 0) {
+                mins = -mins;
+            } else {
+                secs = -secs;
+            }
+        }
+        splitClassAd.insertAttr("Type", "RelativeTime");
+        splitClassAd.insertAttr("Days", days);
+        splitClassAd.insertAttr("Hours", hrs);
+        splitClassAd.insertAttr("Minutes", mins);
+        splitClassAd.insertAttr("Seconds", secs);
+        return;
+    }
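+    // Worked example for relTimeToClassAd above: rsecs = 93784.5 seconds splits into
+    // Days = 1, Hours = 2, Minutes = 3, Seconds = 4.5, since
+    // 93784 = 1*86400 + 2*3600 + 3*60 + 4 and the fractional 0.5 stays in Seconds.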
+
+    public static void absTimeToClassAd(ClassAdTime asecs, ClassAd splitClassAd) throws HyracksDataException {
+        ClassAdTime tms = asecs.getGMTCopy();
+        splitClassAd.insertAttr("Type", "AbsoluteTime");
+        splitClassAd.insertAttr("Year", tms.getYear());
+        splitClassAd.insertAttr("Month", tms.getMonth() + 1);
+        splitClassAd.insertAttr("Day", tms.getDayOfMonth());
+        splitClassAd.insertAttr("Hours", tms.getHours());
+        splitClassAd.insertAttr("Minutes", tms.getMinutes());
+        splitClassAd.insertAttr("Seconds", tms.getSeconds());
+        // Note that the timezone offset is converted from milliseconds to seconds.
+        splitClassAd.insertAttr("Offset", asecs.getOffset() / 1000);
+        return;
+    }
+
+    public static final ClassAdFunc dayTime = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            val.setRelativeTimeValue(new ClassAdTime());
+            return (true);
+        }
+    };
+    public static final ClassAdFunc epochTime = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException {
+            // no arguments
+            if (argList.size() > 0) {
+                val.setErrorValue();
+                return (true);
+            }
+            val.setIntegerValue(0);
+            return (true);
+        }
+    };
+    public static final ClassAdFunc strCat = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            AMutableCharArrayString buf = new AMutableCharArrayString();
+            AMutableCharArrayString s = new AMutableCharArrayString();
+            boolean errorFlag = false;
+            boolean undefFlag = false;
+            boolean rval = false;
+
+            Value val = new Value();
+            Value stringVal = new Value();
+
+            for (int i = 0; i < argList.size(); i++) {
+
+                s.reset();
+                if (!(rval = argList.get(i).publicEvaluate(state, val))) {
+                    break;
+                }
+
+                if (val.isStringValue(s)) {
+                    buf.appendString(s);
+                } else {
+                    Value.convertValueToStringValue(val, stringVal);
+                    if (stringVal.isUndefinedValue()) {
+                        undefFlag = true;
+                        break;
+                    } else if (stringVal.isErrorValue()) {
+                        errorFlag = true;
+                        result.setErrorValue();
+                        break;
+                    } else if (stringVal.isStringValue(s)) {
+                        buf.appendString(s);
+                    } else {
+                        errorFlag = true;
+                        break;
+                    }
+                }
+            }
+
+            // failed evaluating some argument
+            if (!rval) {
+                result.setErrorValue();
+                return (false);
+            }
+            // type error
+            if (errorFlag) {
+                result.setErrorValue();
+                return (true);
+            }
+            // some argument was undefined
+            if (undefFlag) {
+                result.setUndefinedValue();
+                return (true);
+            }
+
+            result.setStringValue(buf);
+            return (true);
+        }
+    };
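+    // Illustrative example for strCat above: strcat("foo", "bar") -> "foobar"; non-string
+    // arguments are first converted with convertValueToStringValue, an undefined argument
+    // makes the whole result undefined, and an unconvertible argument yields the error value.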
+    public static final ClassAdFunc changeCase = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value val = new Value();
+            Value stringVal = new Value();
+            AMutableCharArrayString str = new AMutableCharArrayString();
+            boolean lower = name.equalsIgnoreCase("tolower");
+            int len;
+
+            // only one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return true;
+            }
+
+            // check for evaluation failure
+            if (!argList.get(0).publicEvaluate(state, val)) {
+                result.setErrorValue();
+                return false;
+            }
+
+            if (!val.isStringValue(str)) {
+                Value.convertValueToStringValue(val, stringVal);
+                if (stringVal.isUndefinedValue()) {
+                    result.setUndefinedValue();
+                    return true;
+                } else if (stringVal.isErrorValue()) {
+                    result.setErrorValue();
+                    return true;
+                } else if (!stringVal.isStringValue(str)) {
+                    result.setErrorValue();
+                    return true;
+                }
+            }
+            len = str.size();
+            for (int i = 0; i < len; i++) {
+                str.setChar(i, lower ? Character.toLowerCase(str.charAt(i)) : Character.toUpperCase(str.charAt(i)));
+            }
+            result.setStringValue(str);
+            return (true);
+        }
+    };
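+    // Illustrative examples for changeCase above: toUpper("classad") -> "CLASSAD" and
+    // toLower("ClassAd") -> "classad"; non-string arguments are converted to strings first,
+    // with undefined propagating to undefined.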
+    public static final ClassAdFunc subString = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg0 = new Value();
+            Value arg1 = new Value();
+            Value arg2 = new Value();
+            AMutableCharArrayString buf = new AMutableCharArrayString();
+            AMutableInt64 offset = new AMutableInt64(0);
+            AMutableInt64 len = new AMutableInt64(0);
+            AMutableInt64 alen = new AMutableInt64(0);
+
+            // two or three arguments
+            if (argList.size() < 2 || argList.size() > 3) {
+                result.setErrorValue();
+                return (true);
+            }
+
+            // Evaluate all arguments
+            if (!argList.get(0).publicEvaluate(state, arg0) || !argList.get(1).publicEvaluate(state, arg1)
+                    || (argList.size() > 2 && !argList.get(2).publicEvaluate(state, arg2))) {
+                result.setErrorValue();
+                return (false);
+            }
+
+            // strict on undefined
+            if (arg0.isUndefinedValue() || arg1.isUndefinedValue() || (argList.size() > 2 && arg2.isUndefinedValue())) {
+                result.setUndefinedValue();
+                return (true);
+            }
+
+            // arg0 must be string, arg1 must be int, arg2 (if given) must be int
+            if (!arg0.isStringValue(buf) || !arg1.isIntegerValue(offset)
+                    || (argList.size() > 2 && !arg2.isIntegerValue(len))) {
+                result.setErrorValue();
+                return (true);
+            }
+
+            // perl-like substr; negative offsets and lengths count from the end
+            // of the string
+            alen.setValue(buf.size());
+            if (offset.getLongValue() < 0) {
+                offset.setValue(alen.getLongValue() + offset.getLongValue());
+            } else if (offset.getLongValue() >= alen.getLongValue()) {
+                offset.setValue(alen.getLongValue());
+            }
+            if (len.getLongValue() <= 0) {
+                len.setValue(alen.getLongValue() - offset.getLongValue() + len.getLongValue());
+                if (len.getLongValue() < 0) {
+                    len.setValue(0);
+                }
+            } else if (len.getLongValue() > alen.getLongValue() - offset.getLongValue()) {
+                len.setValue(alen.getLongValue() - offset.getLongValue());
+            }
+
+            // if the length was explicitly given as 0, force len back to 0
+            if (argList.size() == 3) {
+                AMutableInt64 templen = new AMutableInt64(0);
+                arg2.isIntegerValue(templen);
+                if (templen.getLongValue() == 0)
+                    len.setValue(0);
+            }
+            result.setStringValue(buf.substr((int) offset.getLongValue(), (int) len.getLongValue()));
+            return (true);
+        }
+    };
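+    // Illustrative examples for the perl-style subString above: substr("abcdef", 1, 3) -> "bcd",
+    // substr("abcdef", -3, 2) -> "de" (a negative offset counts from the end), and
+    // substr("abcdef", 2) -> "cdef" when no length is given.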
+    public static final ClassAdFunc convInt = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            // takes exactly one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+            Value.convertValueToIntegerValue(arg, result);
+            return true;
+        }
+    };
+    public static final ClassAdFunc compareString = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg0 = new Value();
+            Value arg1 = new Value();
+            Value arg0_s = new Value();
+            Value arg1_s = new Value();
+
+            // Must have two arguments
+            if (argList.size() != 2) {
+                result.setErrorValue();
+                return (true);
+            }
+
+            // Evaluate both arguments
+            if (!argList.get(0).publicEvaluate(state, arg0) || !argList.get(1).publicEvaluate(state, arg1)) {
+                result.setErrorValue();
+                return false;
+            }
+
+            // If either argument is undefined, then the result is
+            // undefined.
+            if (arg0.isUndefinedValue() || arg1.isUndefinedValue()) {
+                result.setUndefinedValue();
+                return true;
+            }
+
+            AMutableCharArrayString s0 = new AMutableCharArrayString();
+            AMutableCharArrayString s1 = new AMutableCharArrayString();
+            if (Value.convertValueToStringValue(arg0, arg0_s) && Value.convertValueToStringValue(arg1, arg1_s)
+                    && arg0_s.isStringValue(s0) && arg1_s.isStringValue(s1)) {
+
+                int order;
+
+                if (name.equalsIgnoreCase("strcmp")) {
+                    order = s0.compareTo(s1);
+                    if (order < 0)
+                        order = -1;
+                    else if (order > 0)
+                        order = 1;
+                } else {
+                    order = s0.compareToIgnoreCase(s1);
+                    if (order < 0)
+                        order = -1;
+                    else if (order > 0)
+                        order = 1;
+                }
+                result.setIntegerValue(order);
+            } else {
+                result.setErrorValue();
+            }
+
+            return (true);
+        }
+    };
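+    // Illustrative examples for compareString above: strcmp("apple", "banana") -> -1,
+    // strcmp("b", "a") -> 1, strcmp("a", "a") -> 0, stricmp("ABC", "abc") -> 0;
+    // non-string arguments are converted to strings before comparison.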
+    public static final ClassAdFunc matchPattern = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            boolean have_options;
+            Value arg0 = new Value();
+            Value arg1 = new Value();
+            Value arg2 = new Value();
+
+            AMutableCharArrayString pattern = new AMutableCharArrayString();
+            AMutableCharArrayString target = new AMutableCharArrayString();
+            AMutableCharArrayString options_string = new AMutableCharArrayString();
+
+            // need two or three arguments: pattern, string, optional settings
+            if (argList.size() != 2 && argList.size() != 3) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (argList.size() == 2) {
+                have_options = false;
+            } else {
+                have_options = true;
+            }
+
+            // Evaluate args
+            if (!argList.get(0).publicEvaluate(state, arg0) || !argList.get(1).publicEvaluate(state, arg1)) {
+                result.setErrorValue();
+                return (false);
+            }
+            if (have_options && !argList.get(2).publicEvaluate(state, arg2)) {
+                result.setErrorValue();
+                return (false);
+            }
+
+            // if either arg is error, the result is error
+            if (arg0.isErrorValue() || arg1.isErrorValue()) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (have_options && arg2.isErrorValue()) {
+                result.setErrorValue();
+                return (true);
+            }
+
+            // if either arg is undefined, the result is undefined
+            if (arg0.isUndefinedValue() || arg1.isUndefinedValue()) {
+                result.setUndefinedValue();
+                return (true);
+            }
+            if (have_options && arg2.isUndefinedValue()) {
+                result.setUndefinedValue();
+                return (true);
+            } else if (have_options && !arg2.isStringValue(options_string)) {
+                result.setErrorValue();
+                return (true);
+            }
+
+            // if either argument is not a string, the result is an error
+            if (!arg0.isStringValue(pattern) || !arg1.isStringValue(target)) {
+                result.setErrorValue();
+                return (true);
+            }
+            return regexp_helper(pattern.toString(), target, null, have_options, options_string.toString(), result);
+        }
+    };
+
+    private static boolean regexp_helper(String pattern, AMutableCharArrayString target, String replace,
+            boolean have_options, String options_string, Value result) {
+        int options = 0;
+        //pattern = pattern.replaceAll("");
+        //pattern = pattern.replaceAll("?", ".");
+        Pattern re;
+        if (have_options) {
+            // We look for the options we understand, and ignore
+            // any others that we might find, hopefully allowing
+            // forwards compatibility.
+            if (options_string.contains("i")) {
+                options |= Pattern.CASE_INSENSITIVE;
+            }
+        }
+        // compile the pattern
+        re = Pattern.compile(pattern, options);
+        Matcher matcher = re.matcher(target.toString());
+        if (matcher.matches()) {
+            result.setBooleanValue(true);
+            return (true);
+        } else {
+            result.setBooleanValue(false);
+            return (true);
+        }
+    }
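+    // Note on regexp_helper above: Matcher.matches() anchors the pattern against the entire
+    // target, so regexp("ab.*", "abcd") -> true while regexp("b.*", "abcd") -> false;
+    // the only recognized option flag is "i" (case-insensitive).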
+
+    public static final ClassAdFunc matchPatternMember = null;
+    public static final ClassAdFunc substPattern = null;
+    public static final ClassAdFunc convReal = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+
+            // takes exactly one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+            Value.convertValueToRealValue(arg, result);
+            return true;
+        }
+    };
+    public static final ClassAdFunc convString = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+
+            // takes exactly one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+
+            Value.convertValueToStringValue(arg, result);
+            return true;
+        }
+    };
+    public static final ClassAdFunc unparse = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            if (argList.size() != 1 || argList.get(0).getKind() != NodeKind.ATTRREF_NODE) {
+                result.setErrorValue();
+            } else {
+
+                // use the printpretty on arg0 to spew out
+                PrettyPrint unp = new PrettyPrint();
+                AMutableCharArrayString szAttribute = new AMutableCharArrayString();
+                AMutableCharArrayString szValue = new AMutableCharArrayString();
+                ExprTree pTree;
+
+                unp.unparse(szAttribute, argList.get(0));
+                // look up the argument within the context of the ad.
+                if (state.getCurAd() != null && (pTree = state.getCurAd().lookup(szAttribute.toString())) != null) {
+                    unp.unparse(szValue, pTree);
+                }
+
+                result.setStringValue(szValue);
+            }
+
+            return (true);
+        }
+    };
+    public static final ClassAdFunc convBool = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            // takes exactly one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+
+            switch (arg.getType()) {
+                case UNDEFINED_VALUE:
+                    result.setUndefinedValue();
+                    return (true);
+
+                case ERROR_VALUE:
+                case CLASSAD_VALUE:
+                case LIST_VALUE:
+                case SLIST_VALUE:
+                case ABSOLUTE_TIME_VALUE:
+                    result.setErrorValue();
+                    return (true);
+
+                case BOOLEAN_VALUE:
+                    result.copyFrom(arg);
+                    return (true);
+
+                case INTEGER_VALUE: {
+                    AMutableInt64 ival = new AMutableInt64(0);
+                    arg.isIntegerValue(ival);
+                    result.setBooleanValue(ival.getLongValue() != 0);
+                    return (true);
+                }
+
+                case REAL_VALUE: {
+                    AMutableDouble rval = new AMutableDouble(0);
+                    arg.isRealValue(rval);
+                    result.setBooleanValue(rval.getDoubleValue() != 0.0);
+                    return (true);
+                }
+
+                case STRING_VALUE: {
+                    AMutableCharArrayString buf = new AMutableCharArrayString();
+                    arg.isStringValue(buf);
+                    if (buf.equalsIgnoreCase("false") || buf.size() == 0) {
+                        result.setBooleanValue(false);
+                    } else {
+                        result.setBooleanValue(true);
+                    }
+                    return (true);
+                }
+
+                case RELATIVE_TIME_VALUE: {
+                    ClassAdTime rsecs = new ClassAdTime();
+                    arg.isRelativeTimeValue(rsecs);
+                    result.setBooleanValue(rsecs.getTimeInMillis() != 0);
+                    return (true);
+                }
+
+                default:
+                    throw new HyracksDataException("Should not reach here");
+            }
+        }
+    };
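+    // Illustrative examples for convBool above: bool(0) -> false, bool(2) -> true,
+    // bool("") -> false, bool("false") -> false, any other string -> true,
+    // bool(undefined) -> undefined; lists, classads and absolute times yield the error value.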
+    public static final ClassAdFunc convTime = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            Value arg2 = new Value();
+            boolean relative = name.equalsIgnoreCase("reltime");
+            boolean secondarg = false; // says whether a 2nd argument exists
+            AMutableInt64 arg2num = new AMutableInt64(0);
+
+            if (argList.size() == 0 && !relative) {
+                // absTime with no arguments returns the current time.
+                return currentTime.call(name, argList, state, result);
+            }
+            if ((argList.size() < 1) || (argList.size() > 2)) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+            if (argList.size() == 2) { // we have a 2nd argument
+                secondarg = true;
+                if (!argList.get(1).publicEvaluate(state, arg2)) {
+                    result.setErrorValue();
+                    return (false);
+                }
+                AMutableInt64 ivalue2 = new AMutableInt64(0);
+                AMutableDouble rvalue2 = new AMutableDouble(0);
+                ClassAdTime rsecs = new ClassAdTime();
+                if (relative) {// 2nd argument is N/A for reltime
+                    result.setErrorValue();
+                    return (true);
+                }
+                // 2nd arg should be integer, real or reltime
+                else if (arg2.isIntegerValue(ivalue2)) {
+                    arg2num.setValue(ivalue2.getLongValue());
+                } else if (arg2.isRealValue(rvalue2)) {
+                    arg2num.setValue((long) rvalue2.getDoubleValue());
+                } else if (arg2.isRelativeTimeValue(rsecs)) {
+                    arg2num.setValue(rsecs.getTimeInMillis());
+                } else {
+                    result.setErrorValue();
+                    return (true);
+                }
+            } else {
+                secondarg = false;
+                arg2num.setValue(0);
+            }
+
+            switch (arg.getType()) {
+                case UNDEFINED_VALUE:
+                    result.setUndefinedValue();
+                    return (true);
+
+                case ERROR_VALUE:
+                case CLASSAD_VALUE:
+                case LIST_VALUE:
+                case SLIST_VALUE:
+                case BOOLEAN_VALUE:
+                    result.setErrorValue();
+                    return (true);
+
+                case INTEGER_VALUE: {
+                    AMutableInt64 ivalue = new AMutableInt64(0);
+                    arg.isIntegerValue(ivalue);
+                    if (relative) {
+                        result.setRelativeTimeValue(new ClassAdTime(ivalue.getLongValue(), false));
+                    } else {
+                        ClassAdTime atvalue = new ClassAdTime(true);
+                        atvalue.setValue(ivalue.getLongValue());
+                        if (secondarg) //2nd arg is the offset in secs
+                            atvalue.setTimeZone((int) arg2num.getLongValue());
+                        else
+                            // the default offset is the current timezone
+                            atvalue.setTimeZone(Literal.findOffset(atvalue));
+
+                        if (atvalue.getOffset() == -1) {
+                            result.setErrorValue();
+                            return (false);
+                        } else
+                            result.setAbsoluteTimeValue(atvalue);
+                    }
+                    return (true);
+                }
+
+                case REAL_VALUE: {
+                    AMutableDouble rvalue = new AMutableDouble(0);
+                    arg.isRealValue(rvalue);
+                    if (relative) {
+                        result.setRelativeTimeValue(new ClassAdTime((long) (1000 * rvalue.getDoubleValue()), false));
+                    } else {
+                        ClassAdTime atvalue = new ClassAdTime();
+                        atvalue.setValue((long) rvalue.getDoubleValue());
+                        if (secondarg) //2nd arg is the offset in secs
+                            atvalue.setTimeZone((int) arg2num.getLongValue());
+                        else
+                            // the default offset is the current timezone
+                            atvalue.setTimeZone(Literal.findOffset(atvalue));
+                        result.setAbsoluteTimeValue(atvalue);
+                    }
+                    return (true);
+                }
+
+                case STRING_VALUE: {
+                    // shouldn't get here: a string argument to this function
+                    // is transformed into a literal directly
+                }
+
+                case ABSOLUTE_TIME_VALUE: {
+                    ClassAdTime secs = new ClassAdTime();
+                    arg.isAbsoluteTimeValue(secs);
+                    if (relative) {
+                        result.setRelativeTimeValue(secs);
+                    } else {
+                        result.copyFrom(arg);
+                    }
+                    return (true);
+                }
+                case RELATIVE_TIME_VALUE: {
+                    if (relative) {
+                        result.copyFrom(arg);
+                    } else {
+                        ClassAdTime secs = new ClassAdTime();
+                        arg.isRelativeTimeValue(secs);
+                        ClassAdTime atvalue = new ClassAdTime();
+                        atvalue.setValue(secs);
+                        if (secondarg) //2nd arg is the offset in secs
+                            atvalue.setTimeZone((int) arg2num.getLongValue());
+                        else
+                            // the default offset is the current timezone
+                            atvalue.setTimeZone(Literal.findOffset(atvalue));
+                        result.setAbsoluteTimeValue(atvalue);
+                    }
+                    return (true);
+                }
+
+                default:
+                    throw new HyracksDataException("Should not reach here");
+            }
+        }
+    };
+    public static final ClassAdFunc doRound = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            Value realValue = new Value();
+            // takes exactly one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+            if (arg.getType() == ValueType.INTEGER_VALUE) {
+                result.copyFrom(arg);
+            } else {
+                if (!Value.convertValueToRealValue(arg, realValue)) {
+                    result.setErrorValue();
+                } else {
+                    AMutableDouble rvalue = new AMutableDouble(0);
+                    realValue.isRealValue(rvalue);
+                    if (name.equalsIgnoreCase("floor")) {
+                        result.setIntegerValue((long) Math.floor(rvalue.getDoubleValue()));
+                    } else if (name.equalsIgnoreCase("ceil") || name.equalsIgnoreCase("ceiling")) {
+                        result.setIntegerValue((long) Math.ceil(rvalue.getDoubleValue()));
+                    } else if (name.equalsIgnoreCase("round")) {
+                        result.setIntegerValue(Math.round(rvalue.getDoubleValue()));
+                    } else {
+                        result.setErrorValue();
+                    }
+                }
+            }
+            return true;
+        }
+    };
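+    // Illustrative examples for doRound above: floor(3.7) -> 3, ceiling(3.2) -> 4,
+    // round(3.5) -> 4; an integer argument is returned unchanged without the real conversion.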
+    public static final ClassAdFunc doMath2 = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            Value arg2 = new Value();
+
+            // takes 2 arguments  pow(val,base)
+            if (argList.size() != 2) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg) || !argList.get(1).publicEvaluate(state, arg2)) {
+                result.setErrorValue();
+                return (false);
+            }
+
+            if (name.equalsIgnoreCase("pow")) {
+                // raise arg to the power of arg2
+                AMutableInt64 ival = new AMutableInt64(0);
+                AMutableInt64 ibase = new AMutableInt64(0);
+                if (arg.isIntegerValue(ival) && arg2.isIntegerValue(ibase) && ibase.getLongValue() >= 0) {
+                    ival.setValue((long) (Math.pow(ival.getLongValue(), ibase.getLongValue())));
+                    result.setIntegerValue(ival.getLongValue());
+                } else {
+                    Value realValue = new Value();
+                    Value realBase = new Value();
+                    if (!Value.convertValueToRealValue(arg, realValue)
+                            || !Value.convertValueToRealValue(arg2, realBase)) {
+                        result.setErrorValue();
+                    } else {
+                        AMutableDouble rvalue = new AMutableDouble(0);
+                        AMutableDouble rbase = new AMutableDouble(1);
+                        realValue.isRealValue(rvalue);
+                        realBase.isRealValue(rbase);
+                        result.setRealValue(Math.pow(rvalue.getDoubleValue(), rbase.getDoubleValue()));
+                    }
+                }
+            } else if (name.equalsIgnoreCase("quantize")) {
+                // quantize arg1 to the next integral multiple of arg2
+                // if arg2 is a list, choose the first item from the list that is at least as large as arg1
+                // if arg1 is larger than all of the items in the list, the result is an error.
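+                // e.g. quantize(7, 4) yields 8, and quantize(3, {2, 4, 8}) yields 4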
+
+                Value val = new Value();
+                Value base = new Value();
+                if (!Value.convertValueToRealValue(arg, val)) {
+                    result.setErrorValue();
+                } else {
+                    // get the value to quantize into rval.
+                    AMutableDouble rval = new AMutableDouble(0);
+                    AMutableDouble rbase = new AMutableDouble(0);
+                    val.isRealValue(rval);
+                    if (arg2.isListValue()) {
+                        ExprList list = new ExprList();
+                        arg2.isListValue(list);
+                        base.setRealValue(0.0);
+                        rbase.setValue(0.0); // treat an empty list as 'don't quantize'
+                        for (ExprTree expr : list.getExprList()) {
+                            if (!expr.publicEvaluate(state, base)) {
+                                result.setErrorValue();
+                                return false; // eval should not fail
+                            }
+                            if (Value.convertValueToRealValue(base, val)) {
+                                val.isRealValue(rbase);
+                                if (rbase.getDoubleValue() >= rval.getDoubleValue()) {
+                                    result.setValue(base);
+                                    return true;
+                                }
+                            } else {
+                                //TJ: should we ignore values that can't be converted?
+                                result.setErrorValue();
+                                return true;
+                            }
+                        }
+                        // at this point base holds the value of the last expression in the list,
+                        // rbase is its real value, and rval > rbase.
+                        // when this happens we want to quantize on multiples of the last
+                        // list value, as if a single value had been passed rather than a list.
+                        arg2.setValue(base);
+                    } else {
+                        // if arg2 is not a list, then it must evaluate to a real value
+                        // or we can't use it. (note that if it's an int, we still want
+                        // to return an int, but we assume that all ints can be converted to real)
+                        if (!Value.convertValueToRealValue(arg2, base)) {
+                            result.setErrorValue();
+                            return true;
+                        }
+                        base.isRealValue(rbase);
+                    }
+
+                    // at this point rbase should contain the real value of either arg2 or the
+                    // last entry in the list. and rval should contain the value to be quantized.
+
+                    AMutableInt64 ival = new AMutableInt64(0);
+                    AMutableInt64 ibase = new AMutableInt64(0);
+                    if (arg2.isIntegerValue(ibase)) {
+                        // quantize to an integer base,
+                        if (ibase.getLongValue() == 0L)
+                            result.setValue(arg);
+                        else if (arg.isIntegerValue(ival)) {
+                            ival.setValue(((ival.getLongValue() + ibase.getLongValue() - 1) / ibase.getLongValue())
+                                    * ibase.getLongValue());
+                            result.setIntegerValue(ival.getLongValue());
+                        } else {
+                            rval.setValue(
+                                    Math.ceil(rval.getDoubleValue() / ibase.getLongValue()) * ibase.getLongValue());
+                            result.setRealValue(rval);
+                        }
+                    } else {
+                        double epsilon = 1e-8;
+                        if (rbase.getDoubleValue() >= -epsilon && rbase.getDoubleValue() <= epsilon) {
+                            result.setValue(arg);
+                        } else {
+                            // we already have the real-valued base in rbase so just use it here.
+                            rval.setValue(
+                                    Math.ceil(rval.getDoubleValue() / rbase.getDoubleValue()) * rbase.getDoubleValue());
+                            result.setRealValue(rval);
+                        }
+                    }
+                }
+            } else {
+                // unknown 2 argument math function
+                result.setErrorValue();
+            }
+            return true;
+        }
+    };
+    public static final ClassAdFunc random = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            // takes zero or one argument; with no argument the upper bound defaults to 1.0
+            if (argList.size() > 1) {
+                result.setErrorValue();
+                return (true);
+            } else if (argList.size() == 0) {
+                arg.setRealValue(1.0);
+            } else if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+            AMutableInt64 int_max = new AMutableInt64(0);
+            AMutableDouble double_max = new AMutableDouble(0);
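+            // note: a new Random seeded from the wall clock is created on every call,
+            // so repeated calls within the same millisecond return identical values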
+            Random randomGenerator = new Random(System.currentTimeMillis());
+            if (arg.isIntegerValue(int_max)) {
+                int random_int = randomGenerator.nextInt((int) int_max.getLongValue());
+                result.setIntegerValue(random_int);
+            } else if (arg.isRealValue(double_max)) {
+                double random_double = double_max.getDoubleValue() * randomGenerator.nextDouble();
+                result.setRealValue(random_double);
+            } else {
+                result.setErrorValue();
+            }
+
+            return true;
+        }
+    };
+    public static final ClassAdFunc ifThenElse = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg1 = new Value();
+            MutableBoolean arg1_bool = new MutableBoolean();
+            // takes exactly three arguments
+            if (argList.size() != 3) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg1)) {
+                result.setErrorValue();
+                return (false);
+            }
+            switch (arg1.getType()) {
+                case BOOLEAN_VALUE:
+                    if (!arg1.isBooleanValue(arg1_bool)) {
+                        result.setErrorValue();
+                        return (false);
+                    }
+                    break;
+                case INTEGER_VALUE: {
+                    AMutableInt64 intval = new AMutableInt64(0);
+                    if (!arg1.isIntegerValue(intval)) {
+                        result.setErrorValue();
+                        return (false);
+                    }
+                    arg1_bool.setValue(intval.getLongValue() != 0L);
+                    break;
+                }
+                case REAL_VALUE: {
+                    AMutableDouble realval = new AMutableDouble(0);
+                    if (!arg1.isRealValue(realval)) {
+                        result.setErrorValue();
+                        return (false);
+                    }
+                    arg1_bool.setValue(realval.getDoubleValue() != 0.0);
+                    break;
+                }
+                case UNDEFINED_VALUE:
+                    result.setUndefinedValue();
+                    return (true);
+                case ERROR_VALUE:
+                case CLASSAD_VALUE:
+                case LIST_VALUE:
+                case SLIST_VALUE:
+                case STRING_VALUE:
+                case ABSOLUTE_TIME_VALUE:
+                case RELATIVE_TIME_VALUE:
+                case NULL_VALUE:
+                    result.setErrorValue();
+                    return (true);
+            }
+            if (arg1_bool.booleanValue()) {
+                if (!argList.get(1).publicEvaluate(state, result)) {
+                    result.setErrorValue();
+                    return (false);
+                }
+            } else {
+                if (!argList.get(2).publicEvaluate(state, result)) {
+                    result.setErrorValue();
+                    return (false);
+                }
+            }
+            return true;
+        }
+    };
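+    // Returns true when the two delimiter-separated string lists given as the first
+    // two arguments share at least one element; an optional third argument supplies
+    // the delimiter, which defaults to ','.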
+    public static final ClassAdFunc stringListsIntersect = new ClassAdFunc() {
+
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg0 = new Value();
+            Value arg1 = new Value();
+            Value arg2 = new Value();
+            boolean have_delimiter;
+            AMutableCharArrayString str0 = new AMutableCharArrayString();
+            AMutableCharArrayString str1 = new AMutableCharArrayString();
+            AMutableCharArrayString delimiter_string = new AMutableCharArrayString();
+
+            // takes two or three arguments: two string lists and an optional delimiter
+            if (argList.size() != 2 && argList.size() != 3) {
+                result.setErrorValue();
+                return true;
+            }
+            if (argList.size() == 2) {
+                have_delimiter = false;
+            } else {
+                have_delimiter = true;
+            }
+
+            // Evaluate args
+            if (!argList.get(0).publicEvaluate(state, arg0) || !argList.get(1).publicEvaluate(state, arg1)) {
+                result.setErrorValue();
+                return true;
+            }
+            if (have_delimiter && !argList.get(2).publicEvaluate(state, arg2)) {
+                result.setErrorValue();
+                return true;
+            }
+
+            // if either arg is error, the result is error
+            if (arg0.isErrorValue() || arg1.isErrorValue()) {
+                result.setErrorValue();
+                return true;
+            }
+            if (have_delimiter && arg2.isErrorValue()) {
+                result.setErrorValue();
+                return true;
+            }
+
+            // if either arg is undefined, the result is undefined
+            if (arg0.isUndefinedValue() || arg1.isUndefinedValue()) {
+                result.setUndefinedValue();
+                return true;
+            }
+            if (have_delimiter && arg2.isUndefinedValue()) {
+                result.setUndefinedValue();
+                return true;
+            } else if (have_delimiter && !arg2.isStringValue(delimiter_string)) {
+                result.setErrorValue();
+                return true;
+            }
+
+            // if the arguments are not of the correct types, the result
+            // is an error
+            if (!arg0.isStringValue(str0) || !arg1.isStringValue(str1)) {
+                result.setErrorValue();
+                return true;
+            }
+            result.setBooleanValue(false);
+
+            List<String> list0 = new ArrayList<String>();
+            Set<String> set1 = new HashSet<String>();
+
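+            // only the first character of the delimiter argument is used; ',' is the default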
+            split_string_list(str0, have_delimiter ? delimiter_string.charAt(0) : ',', list0);
+            split_string_set(str1, have_delimiter ? delimiter_string.charAt(0) : ',', set1);
+
+            for (String str : list0) {
+                if (set1.contains(str)) {
+                    result.setBooleanValue(true);
+                    break;
+                }
+            }
+
+            return true;
+        }
+    };
+    public static final ClassAdFunc interval = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            Value intarg = new Value();
+            AMutableInt64 tot_secs = new AMutableInt64(0);
+            // takes exactly one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return (false);
+            }
+            if (!Value.convertValueToIntegerValue(arg, intarg)) {
+                result.setErrorValue();
+                return (true);
+            }
+            if (!intarg.isIntegerValue(tot_secs)) {
+                result.setErrorValue();
+                return (true);
+            }
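+            // break the total seconds into days, hours, minutes and seconds,
+            // e.g. interval(90061) yields "1+01:01:01"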
+            long days = tot_secs.getLongValue() / (3600 * 24);
+            tot_secs.setValue(tot_secs.getLongValue() % (3600 * 24));
+            long hours = tot_secs.getLongValue() / 3600;
+            tot_secs.setValue(tot_secs.getLongValue() % 3600);
+            long min = tot_secs.getLongValue() / 60;
+            long secs = tot_secs.getLongValue() % 60;
+            String strval;
+            if (days != 0) {
+                strval = String.format("%d+%02d:%02d:%02d", days, Math.abs(hours), Math.abs(min), Math.abs(secs));
+            } else if (hours != 0) {
+                strval = String.format("%d:%02d:%02d", hours, Math.abs(min), Math.abs(secs));
+            } else if (min != 0) {
+                strval = String.format("%d:%02d", min, Math.abs(secs));
+            } else {
+                strval = String.format("%d", secs);
+            }
+            result.setStringValue(strval);
+            return true;
+        }
+    };
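+    // Parses its single string argument as a ClassAd expression and evaluates it in
+    // the current scope, using the EvalState depth counter to guard against runaway
+    // recursion.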
+    public static final ClassAdFunc eval = new ClassAdFunc() {
+        @Override
+        public boolean call(String name, ExprList argList, EvalState state, Value result) throws HyracksDataException {
+            Value arg = new Value();
+            Value strarg = new Value();
+            // takes exactly one argument
+            if (argList.size() != 1) {
+                result.setErrorValue();
+                return true;
+            }
+            if (!argList.get(0).publicEvaluate(state, arg)) {
+                result.setErrorValue();
+                return false;
+            }
+            AMutableCharArrayString s = new AMutableCharArrayString();
+            if (!Value.convertValueToStringValue(arg, strarg) || !strarg.isStringValue(s)) {
+                result.setErrorValue();
+                return true;
+            }
+            if (state.getDepthRemaining() <= 0) {
+                result.setErrorValue();
+                return false;
+            }
+            ClassAdParser parser = new ClassAdParser();
+            ExprTreeHolder expr = new ExprTreeHolder();
+            try {
+                if (!parser.parseExpression(s.toString(), expr, true) || (expr.getInnerTree() == null)) {
+                    result.setErrorValue();
+                    return true;
+                }
+            } catch (IOException e) {
+                throw new HyracksDataException(e);
+            }
+            state.decrementDepth();
+            expr.setParentScope(state.getCurAd());
+            boolean eval_ok = expr.publicEvaluate(state, result);
+            state.incrementDepth();
+            if (!eval_ok) {
+                result.setErrorValue();
+                return false;
+            }
+            return true;
+        }
+    };
+
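+    /**
+     * Splits the given string on the delimiter character, trims each segment and
+     * appends the segments to the supplied list.
+     */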
+    public static void split_string_list(AMutableCharArrayString amutableString, char delim, List<String> list) {
+        if (amutableString.getLength() == 0) {
+            return;
+        }
+        int index = 0;
+        int lastIndex = 0;
+        while (index < amutableString.getLength()) {
+            index = amutableString.firstIndexOf(delim, lastIndex);
+            if (index > 0) {
+                list.add(amutableString.substr(lastIndex, index - lastIndex).trim());
+                lastIndex = index + 1;
+            } else {
+                if (amutableString.getLength() > lastIndex) {
+                    list.add(amutableString.substr(lastIndex).trim());
+                }
+                break;
+            }
+        }
+    }
+
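+    /**
+     * Splits the given string on the delimiter character, trims each segment and
+     * adds the segments to the supplied set.
+     */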
+    public static void split_string_set(AMutableCharArrayString amutableString, char delim, Set<String> set) {
+        if (amutableString.getLength() == 0) {
+            return;
+        }
+        int index = 0;
+        int lastIndex = 0;
+        while (index < amutableString.getLength()) {
+            index = amutableString.firstIndexOf(delim, lastIndex);
+            if (index > 0) {
+                set.add(amutableString.substr(lastIndex, index - lastIndex).trim());
+                lastIndex = index + 1;
+            } else {
+                if (amutableString.getLength() > lastIndex) {
+                    set.add(amutableString.substr(lastIndex).trim());
+                }
+                break;
+            }
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CaseInsensitiveString.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CaseInsensitiveString.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CaseInsensitiveString.java
new file mode 100644
index 0000000..ecc69f8
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CaseInsensitiveString.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
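+/**
+ * A String wrapper whose compareTo, equals and hashCode all ignore case, so
+ * instances can serve as case-insensitive keys in maps and sets.
+ */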
+public class CaseInsensitiveString implements Comparable<CaseInsensitiveString> {
+    private String aString;
+
+    public String get() {
+        return aString;
+    }
+
+    @Override
+    public String toString() {
+        return aString;
+    }
+
+    public void set(String aString) {
+        this.aString = aString;
+    }
+
+    public CaseInsensitiveString(String aString) {
+        this.aString = aString;
+    }
+
+    public CaseInsensitiveString() {
+        this.aString = null;
+    }
+
+    @Override
+    public int compareTo(CaseInsensitiveString o) {
+        return aString.compareToIgnoreCase(o.aString);
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        return (o instanceof CaseInsensitiveString) ? aString.equalsIgnoreCase(((CaseInsensitiveString) o).aString)
+                : false;
+    }
+
+    @Override
+    public int hashCode() {
+        return aString.toLowerCase().hashCode();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CharArrayLexerSource.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CharArrayLexerSource.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CharArrayLexerSource.java
new file mode 100644
index 0000000..3501953
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/CharArrayLexerSource.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.io.IOException;
+
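+/**
+ * A LexerSource backed by an in-memory character array. The offset field tracks
+ * the current read position; readCharacter returns Lexer.EOF once the end of the
+ * buffer is reached, and unreadCharacter steps back one position unless the
+ * previous read already returned EOF.
+ */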
+public class CharArrayLexerSource extends LexerSource {
+
+    private char[] input;
+    private int offset;
+
+    @Override
+    public int getPosition() {
+        return offset;
+    }
+
+    public CharArrayLexerSource() {
+        offset = 0;
+        input = null;
+    }
+
+    public CharArrayLexerSource(char[] input, int offset) {
+        SetNewSource(input, offset);
+    }
+
+    private void SetNewSource(char[] input, int offset2) {
+        this.input = input;
+        this.offset = offset2;
+    }
+
+    @Override
+    public char readCharacter() throws IOException {
+        if (offset == input.length) {
+            previousCharacter = Lexer.EOF;
+            return previousCharacter;
+        } else {
+            previousCharacter = input[offset];
+            offset++;
+            return previousCharacter;
+        }
+    }
+
+    @Override
+    public void unreadCharacter() {
+        if (offset > 0) {
+            if (previousCharacter != Lexer.EOF) {
+                offset--;
+            }
+        }
+    }
+
+    @Override
+    public boolean atEnd() {
+        return offset == input.length;
+    }
+
+    public int GetCurrentLocation() {
+        return offset;
+    }
+
+    @Override
+    public void setNewSource(char[] buffer) {
+        SetNewSource(buffer, 0);
+    }
+
+    @Override
+    public char[] getBuffer() {
+        return input;
+    }
+}


[30/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/data/external-parser/jobads.old
----------------------------------------------------------------------
diff --git a/asterix-app/data/external-parser/jobads.old b/asterix-app/data/external-parser/jobads.old
new file mode 100644
index 0000000..7a1abd7
--- /dev/null
+++ b/asterix-app/data/external-parser/jobads.old
@@ -0,0 +1,1106 @@
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5616@cms"
+JobFinishedHookDone = 1439847319
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 25
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = { "combine_output.tar" }
+ProcId = 0
+CRAB_UserGroup = "dcms"
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439847319
+CRAB_SiteWhitelist = {  }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.out;_condor_stderr=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/73/uscms5616/150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/C=DE/O=GermanGrid/OU=KIT/CN=Andrew Gilbert"
+ClusterId = 1217455
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T2_CH_CERN"
+CompletionDate = 1439847319
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5616"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "agilbert"
+CommittedTime = 0
+X509UserProxy = "63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+QDate = 1439764883
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439764892
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1217455.0#1439764883"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc491"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 82427.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = { "T2_FR_CCIN2P3","T1_IT_CNAF","T1_ES_PIC","T1_UK_RAL","T2_FI_HIP","T2_US_Nebraska" }
+DAG_NodesQueued = 0
+CRAB_JobCount = 25
+JobStartDate = 1439764892
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"splitOnRun\": false, \"events_per_job\": 1}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439764886
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = {  }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/7455/0/cluster1217455.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "prefit_cms_asimov_A1_5D-59ffde2b5d41be5f0c401d0a6a8a0194"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CRAB_JobSW = "CMSSW_7_4_0_pre9"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 82427.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 25
+CRAB_InputData = "/MinBias"
+SUBMIT_x509userproxy = "/data/certs/creds/63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+StreamOut = false
+CRAB_ReqName = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 0
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439764891
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5050@cms"
+JobFinishedHookDone = 1439773907
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 30
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = {  }
+ProcId = 0
+CRAB_UserGroup = undefined
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439773907
+CRAB_SiteWhitelist = { "T3_US_FNALLPC","T2_US_Purdue","T2_US_Nebraska" }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.out;_condor_stderr=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/67/uscms5050/150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ferencek/CN=650164/CN=Dinko Ferencek"
+ClusterId = 1206367
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T3_US_FNALLPC"
+CompletionDate = 1439773907
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+x509userproxyexpiration = 1440294044
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5050"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "ferencek"
+CommittedTime = 0
+X509UserProxy = "3a7798796bc24a800001338917ec45991bcf0a96"
+QDate = 1439615565
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439615574
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1206367.0#1439615565"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc481"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 158333.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = {  }
+DAG_NodesQueued = 0
+CRAB_JobCount = 30
+JobStartDate = 1439615574
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"lheInputFiles\": true, \"splitOnRun\": false, \"events_per_job\": 50000}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439615569
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = { "Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE.root" }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/6367/0/cluster1206367.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "LHE-17521057f93ed9cadf21dd45b3505145"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CRAB_JobSW = "CMSSW_7_1_18"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 158333.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 30
+CRAB_InputData = "/Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8"
+SUBMIT_x509userproxy = "/data/certs/creds/3a7798796bc24a800001338917ec45991bcf0a96"
+StreamOut = false
+CRAB_ReqName = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 1
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439615572
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 2800
+StatsLifetimeStarter = 165949
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "grid_cms"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SubmitEventNotes = "DAG Node: Job53"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+DAGParentNodeNames = ""
+MATCH_GLIDEIN_Site = "CERN"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 163084.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "59069"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+RecentBlockWrites = 0
+CurrentHosts = 0
+MATCH_GLIDEIN_ProcId = 1
+x509UserProxyExpiration = 1440397268
+Iwd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 75000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/128.142.45.103"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "689255460"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job53"
+LastPublicClaimId = "<128.142.45.103:55332>#1439963327#3#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_CH_CERN"
+RemoteSysCpu = 1963.0
+CRAB_Retry = 2
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1238992
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2800"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms5111"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+LastJobLeaseRenewal = 1440131524
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_CH_CERN"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.main"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.kbutanov"
+MATCH_GLIDEIN_SiteWMS_Slot = "Unknown"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms5111@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2800
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440131525
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 1
+MATCH_GLIDEIN_Factory = "gfactory_service"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1233705.0#1439964847"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 59069
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1439965573
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/8123da6528ec4abd24562a99b4f2b0ec556bed0b"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440530096"
+MATCH_EXP_JOB_GLIDEIN_Factory = "gfactory_service"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.main"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 2128005.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=53 --cmsswVersion=CMSSW_7_4_7 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v2/50000/6E2F932B-633B-E511-A7AE-F04DA23BCE4C.root\"] --runAndLumis=job_lumis_53.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.53"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 165965.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_CH_CERN"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 53
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2800"
+MATCH_GLIDEIN_ToRetire = 1440530096
+ImageSize = 4250000
+JobCurrentStartDate = 1439965560
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1439965560
+LastMatchTime = 1439965560
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440564896"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+NumJobReconnects = 2
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SpooledOutputFiles = "jobReport.json.53"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.53"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 165965.0
+JobStatus = 4
+x509UserProxyEmail = "khakimjan.butanov@cern.ch"
+DAGManJobId = 1035690
+RemoteWallClockTime = 165965.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+LastRemoteHost = "glidein_9757_931570227@b635ef6906.cern.ch"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T1_DE_KIT,T2_UK_London_IC,T2_CH_CERN"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 61434
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "1"
+CRAB_localOutputFiles = "stepB_MC.root=stepB_MC_53.root"
+MaxHosts = 1
+CRAB_UserHN = "kbutanov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms5111"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 4095188
+MATCH_EXP_Used_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "grid_cms"
+MATCH_GLIDEIN_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "CERN"
+UserLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM#85bfee36-3b82-11e5-be34-001e67abf518"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 2
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1439964847
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.53"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/log/cmsRun_53.log.tar.gz, srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/stepB_MC_53.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "stepB_MC.root" }
+AutoClusterId = 16275
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439209593
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 2
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "CERN"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_KR_KNU"
+ClusterId = 1233705
+BytesSent = 119952.0
+CRAB_PublishName = "crab_25ns_WJetsToLNu_HT600_800-9da7f68dc2032d8626d7e7822bb10506"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/user/kbutanov.03af76ad04ddc195ee96e6a5469f1bbb1777390d/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_CH_CERN"
+MATCH_GLIDEIN_MaxMemMBs = 2800
+RequestMemory = 2000
+EnteredCurrentStatus = 1440131525
+MATCH_GLIDEIN_SiteWMS = "LSF"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/68/uscms5111/150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "689255460"
+CRAB_JobSW = "CMSSW_7_4_7"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 2800
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "Unknown"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440131525
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440564896
+NiceUser = false
+RootDir = "/"
+CommittedTime = 165965
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "LSF"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 33352
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SubmitEventNotes = "DAG Node: Job4"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 28513.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "2561111"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 8
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 3750000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.182.12"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5092137.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job4"
+LastPublicClaimId = "<129.93.182.12:42491>#1440048812#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 616.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1148372
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+LastJobLeaseRenewal = 1440115142
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440115142
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "OSGGOC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235992.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 2561111
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081789
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440616411"
+MATCH_EXP_JOB_GLIDEIN_Factory = "OSGGOC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=4 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9A89CA60-69FC-E411-9661-0025905C42B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/34F8B66A-D4FB-E411-8F89-842B2B29273C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/7CE6B848-F5FB-E411-A605-0025905A60A8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9E842AA8-54FC-E411-8BC7-000F53273500.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/C44AD465-D4FB-E411-8704-002590200A40.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/90B6CB1B-07FD-E411-BD52-001E67397CBA.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/183FB65F-69FC-E411-A5A8-0025904B7C26.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/5A0A9A0E-EDFB-E411-B95F-00266CF330B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/3E3768F1-61FC-E411-B163-002618943956.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/08DB9DDE-F4FB-E411-9BC9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/CE293F9B-54FC-E411-83E8-AC853D9DACD3.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/A4479F5F-69FC-E411-B0B5-0025904C6378.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/0419455F-D4FB-E411-AEFA-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E6BD6C76-54FC-E411-A1F2-AC853D9DACD7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/AC15F863-F5FB-E411-8F07-002590DB9286.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/CC9B7EE2-F4FB-E411-BCD9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/5844575F-D4FB-E411-81F5-003048FFD732.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/6EC5205E-D4FB-E411-9885-001E67396BB7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/B63200E8-69FC-E411-B949-0025904C51FC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/14554A42-54FC-E411-86D2-0025905A605E.root\"] --runAndLumis=job_lumis_4.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.4"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 33360.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 4
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440616411
+ImageSize = 1750000
+JobCurrentStartDate = 1440081782
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081782
+LastMatchTime = 1440081782
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440651211"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SpooledOutputFiles = "jobReport.json.4"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.4"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 33360.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 33360.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_1936_57194584@red-d8n12.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 3661158
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "8"
+CRAB_localOutputFiles = "results.root=results_4.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1727056
+MATCH_EXP_Used_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.4"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_4.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_4.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235992
+BytesSent = 597241.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440115142
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5092137.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440115142
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440651211
+NiceUser = false
+RootDir = "/"
+CommittedTime = 33360
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 31968
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SubmitEventNotes = "DAG Node: Job3"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 27257.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "3043383"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 14
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 4250000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.183.127"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5096573.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job3"
+LastPublicClaimId = "<129.93.183.127:56441>#1440063351#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 621.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1174388
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+LastJobLeaseRenewal = 1440113502
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440113503
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "SDSC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235991.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 3043383
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081533
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440630710"
+MATCH_EXP_JOB_GLIDEIN_Factory = "SDSC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=3 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/06DE7D5F-D4FB-E411-9C85-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E29E093E-54FC-E411-8AE5-0025905A60FE.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/F0FDF730-EDFB-E411-842B-00261834B51D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/FECCF363-F5FB-E411-85A3-002590DBDFE0.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/0E4CEBFE-ECFB-E411-9F0C-842B2B29273C.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/ECF66DCC-F0FB-E411-84CF-00259074AE32.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/96F29C69-D4FB-E411-9028-842B2B292627.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/6E887F0F-EDFB-E411-875B-BCAEC54B303A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/0C788712-F5FB-E411-AA0E-AC853D9DAC29.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/723A41AE-F4FB-E411-BAA3-0025905C431A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/DA4EA0F5-F4FB-E411-B2AD-00259073E31C.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/98C8F097-F7FB-E411-9A1F-52540006FB8D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/DE4F8235-5FFC-E411-80CD-0025905A6088.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/EA5D6151-F5FB-E411-99F0-0026B92E0C74.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/205D0CF9-F4FB-E411-934D-000F532734AC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/24BCAED9-F0FB-E411-A35B-00259074AE54.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/A4C160C1-F4FB-E411-A66D-B083FED76C6C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E86B536C-54FC-E411-8787-AC853D9DACE1.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/2E68E42D-EDFB-E411-8027-001E67397CC9.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/A056D12B-EDFB-E411-9E51-52540006FDD6.root\"] --runAndLumis=job_lumis_3.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.3"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 31976.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 3
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440630710
+ImageSize = 2000000
+JobCurrentStartDate = 1440081527
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081527
+LastMatchTime = 1440081527
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440665510"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SpooledOutputFiles = "jobReport.json.3"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.3"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 31976.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 31976.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_11321_920434792@red-d23n7.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 4111436
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "14"
+CRAB_localOutputFiles = "results.root=results_3.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1756756
+MATCH_EXP_Used_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.3"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_3.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_3.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235991
+BytesSent = 604821.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440113503
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5096573.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440113503
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440665510
+NiceUser = false
+RootDir = "/"
+CommittedTime = 31976
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+


[08/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java
new file mode 100644
index 0000000..88b7a32
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/FunctionalTester.java
@@ -0,0 +1,1186 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.test;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.asterix.external.classad.AMutableCharArrayString;
+import org.apache.asterix.external.classad.ClassAd;
+import org.apache.asterix.external.classad.ClassAdUnParser;
+import org.apache.asterix.external.classad.ExprList;
+import org.apache.asterix.external.classad.ExprTree;
+import org.apache.asterix.external.classad.ExprTree.NodeKind;
+import org.apache.asterix.external.classad.ExprTreeHolder;
+import org.apache.asterix.external.classad.Lexer.TokenType;
+import org.apache.asterix.external.classad.PrettyPrint;
+import org.apache.asterix.external.classad.StringLexerSource;
+import org.apache.asterix.external.classad.Value;
+import org.apache.asterix.external.library.ClassAdParser;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class FunctionalTester {
+
+    public static Map<String, Variable> variables = new HashMap<String, FunctionalTester.Variable>();
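+    // haveCachedLine/cachedLine form a one-line lookahead used by read_line_file(): a line read
+    // ahead of time is parked here until we know whether it continues the current logical line.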
+    public static boolean haveCachedLine = false;
+    public static String cachedLine = "";
+
+    public static class Variable {
+        public String name;
+        public boolean isTree; // If false, then is value
+        public ExprTreeHolder tree;
+        public Value value;
+
+        public Variable(String name, ExprTree tree) {
+            this.name = name;
+            this.tree = new ExprTreeHolder(tree);
+            this.isTree = true;
+        }
+
+        public Variable() {
+            this.name = null;
+            this.tree = new ExprTreeHolder(null);
+            this.isTree = false;
+        }
+
+        public Variable(String name, Value value) {
+            this.name = name;
+            this.value = value;
+            this.isTree = false;
+            this.tree = null;
+        }
+
+        public void getStringRepresentation(AMutableCharArrayString representation) throws HyracksDataException {
+            ClassAdUnParser unparser = new PrettyPrint();
+
+            if (isTree) {
+                unparser.unparse(representation, tree);
+            } else {
+                unparser.unparse(representation, value);
+            }
+            return;
+        }
+
+    }
+
+    /*--------------------------------------------------------------------
+     *
+     * Private Data Types
+     *
+     *--------------------------------------------------------------------*/
+
+    public enum Command {
+        cmd_NoCommand,
+        cmd_InvalidCommand,
+        cmd_Let,
+        cmd_Eval,
+        cmd_Print,
+        cmd_Same,
+        cmd_Sameq,
+        cmd_Diff,
+        cmd_Diffq,
+        cmd_Set,
+        cmd_Show,
+        cmd_Writexml,
+        cmd_Readxml,
+        cmd_Echo,
+        cmd_Help,
+        cmd_Quit
+    };
+
+    public enum PrintFormat {
+        print_Compact,
+        print_Pretty,
+        print_XML,
+        print_XMLPretty
+    };
+
+    public static class Parameters {
+        public boolean debug;
+        public boolean verbose;
+        public boolean interactive;
+        public BufferedReader inputFile;
+
+        public Parameters() {
+            inputFile = null;
+        }
+
+        /*********************************************************************
+         * Function: Parameters.parseCommandLine
+         * Purpose: Parses the command line. Note that it throws an
+         * IOException if the input file cannot be opened.
+         *
+         * @throws IOException
+         *********************************************************************/
+        public void parseCommandLine(int argc, String[] argv) throws IOException {
+            // First we set up the defaults.
+            debug = false;
+            verbose = false;
+            interactive = true;
+            inputFile = null;
+
+            // Then we parse to see what the user wants.
+            for (int argIndex = 1; argIndex < argc; argIndex++) {
+                if (argv[argIndex].equalsIgnoreCase("-d") || argv[argIndex].equalsIgnoreCase("-debug")) {
+                    debug = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-v") || argv[argIndex].equalsIgnoreCase("-verbose")) {
+                    verbose = true;
+                } else {
+                    if (inputFile == null) {
+                        interactive = false;
+                        inputFile = Files.newBufferedReader(Paths.get(argv[argIndex]), StandardCharsets.UTF_8);
+                    }
+                }
+            }
+            return;
+        }
+    }
+
+    public static class State {
+        public int number_of_errors;
+        public int lineNumber;
+        public PrintFormat format;
+
+        public State() {
+            number_of_errors = 0;
+            lineNumber = 0;
+            format = PrintFormat.print_Compact;
+        }
+    }
+
+    // typedef map<string, Variable *> VariableMap;
+
+    /*--------------------------------------------------------------------
+     *
+     * Private Functions
+     *
+     *--------------------------------------------------------------------*/
+
+    public static int test(int argc, String[] argv) throws IOException {
+        // here
+        boolean quit;
+        AMutableString line = new AMutableString(null);
+        State state = new State();
+        Parameters parameters = new Parameters();
+
+        print_version();
+        parameters.parseCommandLine(argc, argv);
+        quit = false;
+
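+        // Main read-eval loop: read a line, expand $variable references, parse the leading
+        // command, and dispatch it until EOF or an explicit quit.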
+        while (!quit && readLine(line, state, parameters) == true) {
+            boolean good_line;
+            Command command;
+
+            good_line = replace_variables(line, state, parameters);
+            if (good_line) {
+                command = get_command(line, parameters);
+                quit = handle_command(command, line, state, parameters);
+            }
+        }
+        print_final_state(state);
+
+        if (!parameters.interactive && parameters.inputFile != null) {
+            parameters.inputFile.close();
+        }
+
+        if (state.number_of_errors == 0) {
+            return 0;
+        } else {
+            return 1;
+        }
+    }
+
+    /*********************************************************************
+     * Function: readLine
+     * Purpose: Reads the next logical input line from stdin (interactive) or the input file.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static boolean readLine(AMutableString line, State state, Parameters parameters) throws IOException {
+        boolean haveInput;
+
+        if (parameters.interactive) {
+            haveInput = read_line_stdin(line, state, parameters);
+        } else {
+            haveInput = read_line_file(line, state, parameters);
+        }
+        return haveInput;
+    }
+
+    /*********************************************************************
+     * Function: read_line_stdin
+     * Purpose: Prompts with "> " and reads one line from standard input; comments are ignored and an empty line or "q" ends input.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static boolean read_line_stdin(AMutableString line, State state, Parameters parameters) throws IOException {
+        System.out.print("> ");
+        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
+        String line_segment = br.readLine();
+        if (line_segment == null || line_segment.length() == 0 || line_segment.equalsIgnoreCase("q")) {
+            line.setValue("");
+            return false;
+        }
+        if (line_is_comment(line_segment)) {
+            // ignore comments, should we read another line?
+            line.setValue("");
+            return true;
+        } else {
+            line.setValue(line.getStringValue() + line_segment);
+            state.lineNumber++;
+            return true;
+        }
+    }
+
+    /*********************************************************************
+     * Function: read_line_file
+     * Purpose: Reads the next logical line from the input file, skipping comments and joining continuation lines that begin with whitespace.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static boolean read_line_file(AMutableString line, State state, Parameters parameters) throws IOException {
+        boolean have_input;
+
+        // We read a line, either from our one-line cache, or the file.
+        if (!haveCachedLine) {
+            cachedLine = parameters.inputFile.readLine();
+            state.lineNumber++;
+            haveCachedLine = true;
+        } else {
+            // We have a cached-line, but we need to increment the line number for it.
+            // We don't increment it until we use it.
+            state.lineNumber++;
+        }
+        if (cachedLine == null) {
+            cachedLine = parameters.inputFile.readLine();
+            if (cachedLine == null) {
+                have_input = false;
+                haveCachedLine = false;
+            } else {
+                line.setValue(cachedLine);
+                have_input = true;
+                haveCachedLine = false;
+                cachedLine = null;
+            }
+        } else {
+            line.setValue(cachedLine);
+            have_input = true;
+            haveCachedLine = false;
+        }
+
+        // If we actually have a non-comment line, we read another line
+        // from the file If it begins with a whitespace character, then we
+        // append it to the previous line, otherwise we cache it for the
+        // next time we call this function.
+        if (have_input) {
+            if (line_is_comment(line.getStringValue())) {
+                line.setValue("");
+            } else {
+                boolean done = false;
+                while (!done) {
+                    cachedLine = parameters.inputFile.readLine();
+                    if (cachedLine != null && line_is_comment(cachedLine)) {
+                        // ignore comments
+                        state.lineNumber++;
+                    } else if (cachedLine != null && cachedLine.length() == 0) {
+                        line.setValue(line.getStringValue() + " ");
+                        state.lineNumber++;
+                    } else if (cachedLine != null && Character.isWhitespace(cachedLine.charAt(0))) {
+                        line.setValue(line.getStringValue() + cachedLine);
+                        state.lineNumber++;
+                    } else {
+                        done = true;
+                        haveCachedLine = cachedLine != null;
+                    }
+                }
+            }
+        }
+        return have_input;
+    }
+
+    /*********************************************************************
+     * Function: replace_variables
+     * Purpose: Replaces every $name reference in the line with the string representation of the named variable.
+     *
+     * @throws HyracksDataException
+     *********************************************************************/
+    public static boolean replace_variables(AMutableString mutableLine, State state, Parameters parameters)
+            throws HyracksDataException {
+        boolean good_line;
+        String error;
+
+        good_line = true;
+        error = "";
+        Variable var = new Variable();
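+        // Repeatedly scan the line for "$name" references and splice in each variable's
+        // unparsed string representation until no '$' remains.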
+        for (;;) {
+            int dollar;
+            int current_position;
+            String variable_name;
+            AMutableCharArrayString variable_value = new AMutableCharArrayString();
+            current_position = 0;
+            dollar = mutableLine.getStringValue().indexOf('$', current_position);
+            if (dollar < 0) {
+                break;
+            }
+            current_position = dollar + 1;
+            if (current_position >= mutableLine.getStringValue().length()
+                    || !Character.isAlphabetic(mutableLine.getStringValue().charAt(current_position))) {
+                good_line = false;
+                error = "Bad variable name.";
+                break;
+            }
+            current_position++;
+            while (current_position < mutableLine.getStringValue().length()
+                    && (Character.isLetterOrDigit(mutableLine.getStringValue().charAt(current_position))
+                            || mutableLine.getStringValue().charAt(current_position) == '_')) {
+                current_position++;
+            }
+
+            variable_name = mutableLine.getStringValue().substring(dollar + 1, current_position);
+            var = variables.get(variable_name);
+            if (var == null) {
+                good_line = false;
+                error = "Unknown variable '$";
+                error += variable_name;
+                error += "'";
+                break;
+            }
+            var.getStringRepresentation(variable_value);
+
+            // Guard the substring call so we never pass a start index past the end of the line.
+            String end;
+            if (current_position < mutableLine.getStringValue().length()) {
+                end = mutableLine.getStringValue().substring(current_position);
+            } else {
+                end = "";
+            }
+            mutableLine.setValue(mutableLine.getStringValue().substring(0, dollar) + variable_value.toString() + end);
+        }
+
+        if (parameters.debug) {
+            System.err.println("# after replacement: " + mutableLine.getStringValue());
+        }
+
+        if (!good_line) {
+            print_error_message(error, state);
+        }
+        return good_line;
+    }
+
+    /*********************************************************************
+     * Function: get_command
+     * Purpose: Extracts the leading command word from the line, removes it, and maps it to a Command value.
+     *********************************************************************/
+    public static Command get_command(AMutableString line, Parameters parameters) {
+        int current_position;
+        int length;
+        String command_name;
+        Command command;
+
+        current_position = 0;
+        length = line.getStringValue().length();
+        command_name = "";
+        command = Command.cmd_NoCommand;
+
+        // Skip whitespace
+        while (current_position < length && Character.isWhitespace(line.getStringValue().charAt(current_position))) {
+            current_position++;
+        }
+        // Find command name
+        while (current_position < length && Character.isAlphabetic(line.getStringValue().charAt(current_position))) {
+            command_name += line.getStringValue().charAt(current_position);
+            current_position++;
+        }
+        // Figure out what the command is.
+        if (command_name.length() == 0) {
+            command = Command.cmd_NoCommand;
+        } else if (command_name.equalsIgnoreCase("let")) {
+            command = Command.cmd_Let;
+        } else if (command_name.equalsIgnoreCase("eval")) {
+            command = Command.cmd_Eval;
+        } else if (command_name.equalsIgnoreCase("print")) {
+            command = Command.cmd_Print;
+        } else if (command_name.equalsIgnoreCase("same")) {
+            command = Command.cmd_Same;
+        } else if (command_name.equalsIgnoreCase("sameq")) {
+            command = Command.cmd_Sameq;
+        } else if (command_name.equalsIgnoreCase("diff")) {
+            command = Command.cmd_Diff;
+        } else if (command_name.equalsIgnoreCase("diffq")) {
+            command = Command.cmd_Diffq;
+        } else if (command_name.equalsIgnoreCase("set")) {
+            command = Command.cmd_Set;
+        } else if (command_name.equalsIgnoreCase("show")) {
+            command = Command.cmd_Show;
+        } else if (command_name.equalsIgnoreCase("writexml")) {
+            command = Command.cmd_Writexml;
+        } else if (command_name.equalsIgnoreCase("readxml")) {
+            command = Command.cmd_Readxml;
+        } else if (command_name.equalsIgnoreCase("echo")) {
+            command = Command.cmd_Echo;
+        } else if (command_name.equalsIgnoreCase("help")) {
+            command = Command.cmd_Help;
+        } else if (command_name.equalsIgnoreCase("quit")) {
+            command = Command.cmd_Quit;
+        } else {
+            command = Command.cmd_InvalidCommand;
+        }
+        shorten_line(line, current_position);
+        return command;
+    }
+
+    /*********************************************************************
+     * Function: handle_command
+     * Purpose: Dispatches a parsed command to its handler; returns true only for the quit command.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static boolean handle_command(Command command, AMutableString line, State state, Parameters parameters)
+            throws IOException {
+        boolean quit = false;
+
+        switch (command) {
+            case cmd_NoCommand:
+                // Ignore. This isn't a problem.
+                break;
+            case cmd_InvalidCommand:
+                print_error_message("Unknown command on line", state);
+                break;
+            case cmd_Let:
+                handle_let(line, state, parameters);
+                break;
+            case cmd_Eval:
+                handle_eval(line, state, parameters);
+                break;
+            case cmd_Print:
+                handle_print(line, state, parameters);
+                break;
+            case cmd_Same:
+                handle_same(line, state, parameters);
+                break;
+            case cmd_Sameq:
+                handle_sameq(line, state, parameters);
+                break;
+            case cmd_Diff:
+                handle_diff(line, state, parameters);
+                break;
+            case cmd_Diffq:
+                handle_diffq(line, state, parameters);
+                break;
+            case cmd_Set:
+                handle_set(line, state, parameters);
+                break;
+            case cmd_Show:
+                handle_show(line, state, parameters);
+                break;
+            case cmd_Writexml:
+                // handle_writexml(line, state, parameters);
+                break;
+            case cmd_Readxml:
+                // handle_readxml(line, state, parameters);
+                break;
+            case cmd_Echo:
+                handle_echo(line.getStringValue(), state, parameters);
+                break;
+            case cmd_Help:
+                handle_help();
+                break;
+            case cmd_Quit:
+                quit = true;
+                break;
+        }
+        return quit;
+    }
+
+    /*********************************************************************
+     * Function: handle_let
+     * Purpose: Binds a variable to an unevaluated expression parsed from the rest of the line.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void handle_let(AMutableString line, State state, Parameters parameters) throws IOException {
+        AMutableString variable_name = new AMutableString(null);
+        ExprTree tree;
+        Variable variable;
+
+        if (get_variable_name(line, true, variable_name, state, parameters)) {
+            tree = get_expr(line, state, parameters);
+            if (tree != null) {
+                variable = new Variable(variable_name.getStringValue(), tree);
+                variables.put(variable_name.getStringValue(), variable);
+                if (parameters.interactive) {
+                    print_expr(tree, state, parameters);
+                }
+            }
+        }
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_eval
+     * Purpose: Parses and evaluates an expression, then binds the resulting value to a variable.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void handle_eval(AMutableString line, State state, Parameters parameters) throws IOException {
+        AMutableString variable_name = new AMutableString("");
+        ExprTree tree;
+        Variable variable;
+
+        if (get_variable_name(line, true, variable_name, state, parameters)) {
+            tree = get_expr(line, state, parameters);
+            if (tree != null) {
+                Value value = new Value();
+                if (!evaluate_expr(tree, value, parameters)) {
+                    print_error_message("Couldn't evaluate rvalue", state);
+                } else {
+                    variable = new Variable(variable_name.getStringValue(), value);
+                    variables.put(variable_name.getStringValue(), variable);
+                    if (true) {
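+                        // The evaluated result is always echoed here, even in non-interactive runs.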
+                        System.out.println(variable_name.getStringValue() + " = " + value);
+                    }
+                }
+            }
+        }
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_print
+     * Purpose: Parses an expression from the line and prints it.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void handle_print(AMutableString line, State state, Parameters parameters) throws IOException {
+        ExprTree tree;
+        tree = get_expr(line, state, parameters);
+        if (tree != null) {
+            print_expr(tree, state, parameters);
+        }
+    }
+
+    /*********************************************************************
+     * Function: handle_same
+     * Purpose: Evaluates two expressions and reports an error if their values differ.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void handle_same(AMutableString line, State state, Parameters parameters) throws IOException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder tree2 = new ExprTreeHolder();
+        Value value1 = new Value();
+        Value value2 = new Value();
+        try {
+            get_two_exprs(line, tree, tree2, state, parameters);
+            if (tree.getInnerTree() != null || tree2.getInnerTree() != null) {
+
+                if (parameters.debug) {
+                    System.out.println("Sameeval has two trees:");
+                    System.out.print(" ");
+                    print_expr(tree, state, parameters);
+                    System.out.print(" ");
+                    print_expr(tree2, state, parameters);
+                }
+                if (!evaluate_expr(tree, value1, parameters)) {
+                    print_error_message("Couldn't evaluate first expression.\n", state);
+                } else if (!evaluate_expr(tree2, value2, parameters)) {
+                    print_error_message("Couldn't evaluate second expressions.\n", state);
+                } else if (!value1.sameAs(value2)) {
+                    print_error_message("the expressions are different.", state);
+                    assert (false);
+                }
+                if (parameters.debug) {
+                    System.out.println("They evaluated to: ");
+                    System.out.println(" " + value1);
+                    System.out.println(" " + value2);
+                }
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            assert (false);
+        }
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_sameq
+     * Purpose: Compares two expressions structurally, without evaluating them, and reports an error if they differ.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void handle_sameq(AMutableString line, State state, Parameters parameters) throws IOException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder tree2 = new ExprTreeHolder();
+
+        get_two_exprs(line, tree, tree2, state, parameters);
+        if (tree.getInnerTree() != null || tree2.getInnerTree() != null) {
+            if (!tree.sameAs(tree2)) {
+                print_error_message("the expressions are different.", state);
+            }
+        }
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_diff
+     * Purpose: Evaluates two expressions and reports an error if their values are the same.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void handle_diff(AMutableString line, State state, Parameters parameters) throws IOException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder tree2 = new ExprTreeHolder();
+        Value value1 = new Value();
+        Value value2 = new Value();
+
+        get_two_exprs(line, tree, tree2, state, parameters);
+        if (tree.getInnerTree() != null || tree2.getInnerTree() != null) {
+            if (!evaluate_expr(tree, value1, parameters)) {
+                print_error_message("Couldn't evaluate first expression.\n", state);
+            } else if (!evaluate_expr(tree2, value2, parameters)) {
+                print_error_message("Couldn't evaluate second expressions.\n", state);
+            } else if (value1.sameAs(value2)) {
+                print_error_message("the expressions are the same.", state);
+                assert (false);
+            }
+        }
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_diffq
+     * Purpose: Compares two expressions structurally, without evaluating them, and reports an error if they are the same.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void handle_diffq(AMutableString line, State state, Parameters parameters) throws IOException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder tree2 = new ExprTreeHolder();
+
+        get_two_exprs(line, tree, tree2, state, parameters);
+        if (tree.getInnerTree() != null || tree2.getInnerTree() != null) {
+            if (tree.sameAs(tree2)) {
+                print_error_message("the expressions are the same.", state);
+            }
+        }
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_set
+     * Purpose: Sets an interpreter option; currently only the output format can be changed.
+     *********************************************************************/
+    public static void handle_set(AMutableString line, State state, Parameters parameters) {
+        AMutableString option_name = new AMutableString(null);
+        AMutableString option_value = new AMutableString(null);
+
+        if (get_variable_name(line, false, option_name, state, parameters)) {
+            if (get_variable_name(line, false, option_value, state, parameters)) {
+                if (option_name.getStringValue().equalsIgnoreCase("format")) {
+                    if (option_value.getStringValue().equalsIgnoreCase("compact")) {
+                        state.format = PrintFormat.print_Compact;
+                    } else if (option_value.getStringValue().equalsIgnoreCase("pretty")) {
+                        state.format = PrintFormat.print_Pretty;
+                    } else if (option_value.getStringValue().equalsIgnoreCase("xml")) {
+                        state.format = PrintFormat.print_XML;
+                    } else if (option_value.getStringValue().equalsIgnoreCase("xmlpretty")) {
+                        state.format = PrintFormat.print_XMLPretty;
+                    } else {
+                        print_error_message("Unknown print format. Use compact, pretty, xml, or xmlpretty", state);
+                    }
+                } else {
+                    print_error_message("Unknown option. The only option currently available is format", state);
+                }
+            }
+        }
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_show
+     * Purpose: Shows an interpreter option; currently only the output format is supported.
+     *********************************************************************/
+    public static void handle_show(AMutableString line, State state, Parameters parameters) {
+        AMutableString option_name = new AMutableString(null);
+
+        if (get_variable_name(line, false, option_name, state, parameters)) {
+            if (option_name.getStringValue().equalsIgnoreCase("format")) {
+                System.out.print("Format: ");
+                switch (state.format) {
+                    case print_Compact:
+                        System.out.print("Traditional Compact\n");
+                        break;
+                    case print_Pretty:
+                        System.out.print("Traditional Pretty\n");
+                        break;
+                    case print_XML:
+                        System.out.print("XML Compact\n");
+                        break;
+                    case print_XMLPretty:
+                        System.out.print("XML Pretty\n");
+                        break;
+                }
+            } else {
+                print_error_message("Unknown option. The only option currently available is format", state);
+            }
+        }
+
+        return;
+    }
+
+    /*********************************************************************
+     * Function: handle_writexml
+     * Purpose: Writes ClassAds to an XML file (implementation currently commented out).
+     *
+     * @throws IOException
+     *********************************************************************/
+    /*
+    public static void handle_writexml(AMutableString line, State state, Parameters parameters) throws IOException {
+    AMutableString filename = new AMutableString("");
+    ExprTree expr;
+    BufferedWriter xml_file;
+
+    if (get_file_name(line, filename, state, parameters)) {
+    if ((expr = get_expr(line, state, parameters)) != null) {
+    if (expr_okay_for_xml_file(expr, state, parameters)) {
+    xml_file = Files.newBufferedWriter(Paths.get(filename.getStringValue()));
+    ClassAdXMLUnParser unparser = new ClassAdXMLUnParser();
+    AMutableCharArrayString classad_text = new AMutableCharArrayString();
+    xml_file.write("<classads>\n");
+    if (expr.getKind() == NodeKind.CLASSAD_NODE) {
+    unparser.Unparse(classad_text, expr);
+    xml_file.write(classad_text.toString());
+    } else {
+    ExprList list = (ExprList) expr;
+    for (ExprTree classad : list.getExprList()) {
+    classad_text.setValue("");
+    unparser.Unparse(classad_text, classad);
+    xml_file.write(classad_text.toString());
+    xml_file.newLine();
+    }
+    }
+    xml_file.write("</classads>\n");
+    xml_file.close();
+    }
+    }
+    }
+    return;
+    }
+
+    *//*********************************************************************
+      * Function: handle_readxml
+      * Purpose:
+      *
+      * @throws IOException
+      *********************************************************************/
+
+    /*
+    public static void handle_readxml(AMutableString line, State state, Parameters parameters) throws IOException {
+    AMutableString variable_name = new AMutableString(null);
+    AMutableString file_name = new AMutableString(null);
+
+    if (get_variable_name(line, false, variable_name, state, parameters)) {
+    if (get_file_name(line, file_name, state, parameters)) {
+    InputStream xml_file = Files.newInputStream(Paths.get(file_name.getStringValue()));
+    ExprList list;
+    ClassAd classad;
+    ClassAdXMLParser parser = new ClassAdXMLParser();
+    Variable variable;
+
+    list = new ExprList();
+    do {
+    classad = parser.ParseClassAd(new InputStreamLexerSource(xml_file));
+    if (classad != null) {
+    list.add(classad);
+    }
+    } while (classad != null);
+    variable = new Variable(variable_name.getStringValue(), list);
+    variables.put(variable_name.getStringValue(), variable);
+    if (parameters.interactive) {
+    print_expr(list, state, parameters);
+    }
+    }
+    }
+    }
+
+    *//*********************************************************************
+      * Function: handle_echo
+      * Purpose: Handle the "echo" command: print the remainder of the line with leading whitespace stripped.
+      *********************************************************************/
+    public static void handle_echo(String line, State state, Parameters parameters) {
+        AMutableCharArrayString new_line = new AMutableCharArrayString();
+        int index;
+
+        index = 0;
+
+        while (index < line.length() && Character.isWhitespace(line.charAt(index))) {
+            index++;
+        }
+        while (index < line.length()) {
+            new_line.appendChar(line.charAt(index));
+            index++;
+        }
+
+        System.out.println(new_line.toString());
+    }
+
+    /*********************************************************************
+     * Function: handle_help
+     * Purpose: Print the version banner plus a summary of the available commands and options.
+     *********************************************************************/
+    public static void handle_help() {
+        print_version();
+
+        System.out.println();
+        System.out.println("Commands:");
+        System.out.println("let name = expr   Set a variable to an unevaluated expression.");
+        System.out.println("eval name = expr  Set a variable to an evaluated expression.");
+        System.out.println("same expr1 expr2  Prints a message only if expr1 and expr2 are different.");
+        System.out.println("sameq expr1 expr2 Prints a message only if expr1 and expr2 are different.");
+        System.out.println("                   same evaluates its expressions first, sameq doesn't.");
+        System.out.println("diff expr1 expr2  Prints a message only if expr1 and expr2 are the same.");
+        System.out.println("diffq expr1 expr2 Prints a message only if expr1 and expr2 are the same.");
+        System.out.println("                   diff evaluates its expressions first, diffq doesn't.");
+        System.out.println("set opt value     Sets an option to a particular value.");
+        System.out.println("quit              Exit this program.");
+        System.out.println("help              Print this message.");
+        System.out.println();
+        System.out.println("Options (for the set command):");
+        System.out.println("format              Set the way ClassAds print.");
+        System.out.println("  compact           A compact, traditional style");
+        System.out.println("  pretty            Traditional, with more spaces");
+        System.out.println("  xml               A compact XML representation");
+        System.out.println("  xmlpretty         XML with extra spacing for readability.");
+        return;
+    }
+
+    /*********************************************************************
+     * Function: get_variable_name
+     * Purpose: Parse a variable (attribute) name from the front of the line, optionally consuming a trailing '='.
+     *********************************************************************/
+    public static boolean get_variable_name(AMutableString line, boolean swallow_equals, AMutableString variable_name,
+            State state, Parameters parameters) {
+        int current_position;
+        int length;
+        boolean have_good_name;
+
+        current_position = 0;
+        length = line.getStringValue().length();
+        variable_name.setValue("");
+        have_good_name = false;
+
+        // Skip whitespace
+        while (current_position < length && Character.isWhitespace(line.getStringValue().charAt(current_position))) {
+            current_position++;
+        }
+        // Find variable name
+        if (current_position < length && Character.isAlphabetic(line.getStringValue().charAt(current_position))) {
+            variable_name.setValue(variable_name.getStringValue() + line.getStringValue().charAt(current_position));
+            current_position++;
+            // As soon as we have at least one character in the name, it's good.
+            have_good_name = true;
+
+            while (current_position < length
+                    && (Character.isLetterOrDigit(line.getStringValue().charAt(current_position))
+                            || line.getStringValue().charAt(current_position) == '_')) {
+                variable_name.setValue(variable_name.getStringValue() + line.getStringValue().charAt(current_position));
+                current_position++;
+            }
+        }
+        if (!have_good_name) {
+            print_error_message("Bad variable name", state);
+        } else if (swallow_equals) {
+            // Skip whitespace
+            while (current_position < length
+                    && Character.isWhitespace(line.getStringValue().charAt(current_position))) {
+                current_position++;
+            }
+            if (current_position < length && line.getStringValue().charAt(current_position) == '=') {
+                current_position++;
+            } else {
+                print_error_message("Missing equal sign", state);
+                have_good_name = false;
+            }
+        }
+
+        if (parameters.debug) {
+            if (have_good_name) {
+                System.out.println("# Got variable name: " + variable_name);
+            } else {
+                System.out.println("# Bad variable name: " + variable_name);
+            }
+        }
+
+        shorten_line(line, current_position);
+        return have_good_name;
+    }
+
+    /*********************************************************************
+     * Function: get_file_name
+     * Purpose: Parse a whitespace-delimited file name from the front of the line.
+     *********************************************************************/
+    public static boolean get_file_name(AMutableString line, AMutableString variable_name, State state,
+            Parameters parameters) {
+        int current_position;
+        int length;
+        boolean have_good_name;
+
+        current_position = 0;
+        length = line.getStringValue().length();
+        variable_name.setValue("");
+        have_good_name = false;
+
+        // Skip whitespace
+        while (current_position < length && Character.isWhitespace(line.getStringValue().charAt(current_position))) {
+            current_position++;
+        }
+        // Find file name
+        while (current_position < length && (!Character.isWhitespace(line.getStringValue().charAt(current_position)))) {
+            have_good_name = true;
+            variable_name.setValue(variable_name.getStringValue() + line.getStringValue().charAt(current_position));
+            current_position++;
+        }
+        if (!have_good_name) {
+            print_error_message("Bad file name", state);
+        }
+
+        if (parameters.debug) {
+            if (have_good_name) {
+                System.out.println("# Got file name: " + variable_name.getStringValue());
+            } else {
+                System.out.println("# Bad file name: " + variable_name.getStringValue());
+            }
+        }
+
+        shorten_line(line, current_position);
+        return have_good_name;
+    }
+
+    /*********************************************************************
+     * Function: get_expr
+     * Purpose: Parse a single ClassAd expression from the front of the line and consume it.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static ExprTree get_expr(AMutableString line, State state, Parameters parameters) throws IOException {
+        int offset;
+        ExprTree tree;
+        ClassAdParser parser = new ClassAdParser();
+        StringLexerSource lexer_source = new StringLexerSource(line.getStringValue());
+
+        tree = parser.parseExpression(lexer_source, false);
+        offset = lexer_source.getCurrentLocation();
+        shorten_line(line, offset);
+
+        if (tree == null) {
+            print_error_message("Missing expression", state);
+        }
+
+        return tree;
+    }
+
+    /*********************************************************************
+     * Function: get_two_exprs
+     * Purpose: Parse two comma-separated ClassAd expressions from the front of the line.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void get_two_exprs(AMutableString line, ExprTreeHolder tree1, ExprTreeHolder tree2, State state,
+            Parameters parameters) throws IOException {
+        int offset;
+        ClassAdParser parser = new ClassAdParser();
+        StringLexerSource lexer_source = new StringLexerSource(line.getStringValue());
+
+        tree1.setInnerTree(parser.parseExpression(lexer_source, false));
+        if (tree1.getInnerTree() == null) {
+            print_error_message("Couldn't parse first expression.", state);
+            tree2.setInnerTree(null);
+            throw new IOException();
+        } else {
+            if (parameters.debug) {
+                System.out.print("# Tree1: ");
+                print_expr(tree1, state, parameters);
+            }
+
+            if (parser.peekToken() != TokenType.LEX_COMMA) {
+                print_error_message("Missing comma.\n", state);
+                tree1.setInnerTree(null);
+                tree2.setInnerTree(null);
+            } else {
+                parser.consumeToken();
+                tree2.setInnerTree(parser.parseNextExpression());
+                offset = lexer_source.getCurrentLocation();
+                shorten_line(line, offset);
+                if (tree2.getInnerTree() == null) {
+                    print_error_message("Couldn't parse second expression.", state);
+                    tree1.setInnerTree(null);
+                    throw new IOException();
+                } else if (parameters.debug) {
+                    System.out.print("# Tree2: ");
+                    print_expr(tree2, state, parameters);
+                    System.out.print("# Tree1: ");
+                    print_expr(tree1, state, parameters);
+                    System.out.println();
+                }
+            }
+        }
+
+        return;
+    }
+
+    /*********************************************************************
+     * Function: print_expr
+     * Purpose: Print an expression using the currently selected print format.
+     *
+     * @throws HyracksDataException
+     *********************************************************************/
+    public static void print_expr(ExprTree tree, State state, Parameters parameters) throws HyracksDataException {
+        AMutableCharArrayString output = new AMutableCharArrayString();
+
+        if (state.format == PrintFormat.print_Compact) {
+            ClassAdUnParser unparser = new ClassAdUnParser();
+            unparser.unparse(output, tree);
+        } else if (state.format == PrintFormat.print_Pretty) {
+            PrettyPrint unparser = new PrettyPrint();
+            unparser.unparse(output, tree);
+        } else if (state.format == PrintFormat.print_XML) {
+            /* XML unparsing is currently commented out:
+            ClassAdXMLUnParser unparser = new ClassAdXMLUnParser();
+            unparser.SetCompactSpacing(true);
+            unparser.Unparse(output, tree);
+            } else if (state.format == PrintFormat.print_XMLPretty) {
+            ClassAdXMLUnParser unparser = new ClassAdXMLUnParser();
+            unparser.SetCompactSpacing(false);
+            unparser.Unparse(output, tree);
+            */
+        }
+        System.out.println(output);
+    }
+
+    /*********************************************************************
+     * Function: evaluate_expr
+     * Purpose: Evaluate an expression by inserting it into a scratch ClassAd under a temporary attribute.
+     *
+     * @throws HyracksDataException
+     *********************************************************************/
+    public static boolean evaluate_expr(ExprTree tree, Value value, Parameters parameters) throws HyracksDataException {
+        ClassAd classad = new ClassAd();
+        boolean success = false;
+        classad.insert("internal___", tree);
+        success = classad.evaluateAttr("internal___", value);
+        classad.remove("internal___");
+        return success;
+    }
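+
+    // Note on the technique above: evaluate_expr uses a scratch ClassAd as the
+    // evaluation context by inserting the expression under a throwaway attribute,
+    // evaluating that attribute, and removing it again. A hypothetical caller
+    // (names assumed for illustration) would look like:
+    //     Value v = new Value();
+    //     boolean ok = evaluate_expr(someTree, v, parameters);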
+
+    /*********************************************************************
+     * Function: shorten_line
+     * Purpose: Drop the first 'offset' characters of the line (the part already consumed).
+     *********************************************************************/
+    public static void shorten_line(AMutableString line, int offset) {
+        // Guard against offsets at or past the end of the string; String.substring()
+        // throws for out-of-range values (the original C++ substr() with gcc 2.96
+        // would assert/except on values that are too large).
+        if (offset < line.getStringValue().length()) {
+            line.setValue(line.getStringValue().substring(offset));
+        } else {
+            line.setValue("");
+        }
+    }
+
+    /*********************************************************************
+     * Function: print_version
+     * Purpose: Print the ClassAd library version banner.
+     *********************************************************************/
+    public static void print_version() {
+        AMutableString classad_version = new AMutableString(null);
+        ClassAd.classAdLibraryVersion(classad_version);
+        System.out.println("ClassAd Functional Tester v" + classad_version.getStringValue());
+        return;
+    }
+
+    /*********************************************************************
+     * Function: print_error_message
+     * Purpose: Report an error on the current input line and increment the error count.
+     *********************************************************************/
+    public static void print_error_message(String error, State state) {
+        System.out.println("* Line " + state.lineNumber + ": " + error);
+        state.number_of_errors++;
+    }
+
+    /*********************************************************************
+     * Function: print_final_state
+     * Purpose: Print a summary of how many errors were encountered.
+     *********************************************************************/
+    public static void print_final_state(State state) {
+        if (state.number_of_errors == 0) {
+            System.out.println("No errors.");
+        } else if (state.number_of_errors == 1) {
+            System.out.println("1 error.");
+        } else {
+            System.out.println(state.number_of_errors + " errors");
+        }
+        return;
+    }
+
+    public static boolean line_is_comment(String line) {
+        boolean is_comment;
+
+        if (line.length() > 1 && line.charAt(0) == '/' && line.charAt(1) == '/') {
+            is_comment = true;
+        } else {
+            is_comment = false;
+        }
+        return is_comment;
+    }
+
+    public static boolean expr_okay_for_xml_file(ExprTree tree, State state, Parameters parameters) {
+        boolean is_okay;
+
+        if (tree.getKind() == NodeKind.CLASSAD_NODE) {
+            is_okay = true;
+        } else if (tree.getKind() != NodeKind.EXPR_LIST_NODE) {
+            is_okay = false;
+            System.out.println("We have " + tree.getKind().ordinal());
+        } else {
+            ExprList list = (ExprList) tree;
+            is_okay = true;
+            for (ExprTree element : list.getExprList()) {
+
+                if (element.getKind() != NodeKind.CLASSAD_NODE) {
+                    System.out.println("Inside list, we have " + tree.getKind().ordinal());
+                    is_okay = false;
+                    break;
+                }
+            }
+        }
+        if (!is_okay) {
+            print_error_message("writexml requires a ClassAd or list of ClassAds as an argument.", state);
+        }
+        return is_okay;
+    }
+
+}



[10/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/PrettyPrint.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/PrettyPrint.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/PrettyPrint.java
new file mode 100644
index 0000000..e738bb9
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/PrettyPrint.java
@@ -0,0 +1,257 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.asterix.external.classad.ExprTree.NodeKind;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class PrettyPrint extends ClassAdUnParser {
+    private int classadIndent;
+    private int listIndent;
+    private boolean wantStringQuotes;
+    private boolean minimalParens;
+    private int indentLevel;
+
+    public PrettyPrint() {
+        classadIndent = 4;
+        listIndent = 3;
+        wantStringQuotes = true;
+        minimalParens = false;
+        indentLevel = 0;
+    }
+
+    /// Set the indentation width for displaying lists
+    public void setListIndentation() {
+        // default is 4
+        setListIndentation(4);
+    }
+
+    public void setClassAdIndentation(int len) {
+        classadIndent = len;
+    }
+
+    public int getClassAdIndentation() {
+        return (classadIndent);
+    }
+
+    public void setListIndentation(int len) {
+        listIndent = len;
+    }
+
+    public int getListIndentation() {
+        return (listIndent);
+    }
+
+    public void setWantStringQuotes(boolean b) {
+        wantStringQuotes = b;
+    }
+
+    public boolean getWantStringQuotes() {
+        return (wantStringQuotes);
+    }
+
+    public void setMinimalParentheses(boolean b) {
+        minimalParens = b;
+    }
+
+    public boolean getMinimalParentheses() {
+        return (minimalParens);
+    }
+
+    @Override
+    public void unparseAux(AMutableCharArrayString buffer, int op, ExprTreeHolder op1, ExprTreeHolder op2,
+            ExprTreeHolder op3) throws HyracksDataException {
+        if (!minimalParens) {
+            super.unparseAux(buffer, op, op1, op2, op3);
+            return;
+        }
+
+        // case 0: parentheses op
+        if (op == Operation.OpKind_PARENTHESES_OP) {
+            unparse(buffer, op1);
+            return;
+        }
+        // case 1: check for unary ops
+        if (op == Operation.OpKind_UNARY_PLUS_OP || op == Operation.OpKind_UNARY_MINUS_OP
+                || op == Operation.OpKind_LOGICAL_NOT_OP || op == Operation.OpKind_BITWISE_NOT_OP) {
+            buffer.appendString(opString[op]);
+            unparse(buffer, op1);
+            return;
+        }
+        // case 2: check for ternary op
+        if (op == Operation.OpKind_TERNARY_OP) {
+            unparse(buffer, op1);
+            buffer.appendString(" ? ");
+            unparse(buffer, op2);
+            buffer.appendString(" : ");
+            unparse(buffer, op3);
+            return;
+        }
+        // case 3: check for subscript op
+        if (op == Operation.OpKind_SUBSCRIPT_OP) {
+            unparse(buffer, op1);
+            buffer.appendChar('[');
+            unparse(buffer, op2);
+            buffer.appendChar(']');
+            return;
+        }
+        // all others are binary ops
+        AMutableInt32 top = new AMutableInt32(0);
+        ExprTreeHolder t1 = new ExprTreeHolder(), t2 = new ExprTreeHolder(), t3 = new ExprTreeHolder();
+
+        if (op1.getKind() == NodeKind.OP_NODE) {
+            ((Operation) op1.getInnerTree()).getComponents(top, t1, t2, t3);
+            if (Operation.precedenceLevel(top.getIntegerValue().intValue()) < Operation.precedenceLevel(op)) {
+                buffer.appendString(" ( ");
+                unparseAux(buffer, top.getIntegerValue().intValue(), t1, t2, t3);
+                buffer.appendString(" ) ");
+            }
+        } else {
+            unparse(buffer, op1);
+        }
+        buffer.appendString(opString[op]);
+        if (op2.getKind() == NodeKind.OP_NODE) {
+            ((Operation) op2.getInnerTree()).getComponents(top, t1, t2, t3);
+            if (Operation.precedenceLevel(top.getIntegerValue().intValue()) < Operation.precedenceLevel(op)) {
+                buffer.appendString(" ( ");
+                unparseAux(buffer, top.getIntegerValue().intValue(), t1, t2, t3);
+                buffer.appendString(" ) ");
+            }
+        } else {
+            unparse(buffer, op2);
+        }
+    }
+
+    @Override
+    public void unparseAux(AMutableCharArrayString buffer, Map<CaseInsensitiveString, ExprTree> attrs)
+            throws HyracksDataException {
+        if (classadIndent > 0) {
+            indentLevel += classadIndent;
+            buffer.appendChar('\n');
+            int i = 0;
+            while (i < indentLevel) {
+                buffer.appendChar(' ');
+                i++;
+            }
+            buffer.appendChar('[');
+            indentLevel += classadIndent;
+        } else {
+            buffer.appendString("[ ");
+        }
+        for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
+            if (classadIndent > 0) {
+                buffer.appendChar('\n');
+                int i = 0;
+                while (i < indentLevel) {
+                    buffer.appendChar(' ');
+                    i++;
+                }
+            }
+            super.unparseAux(buffer, entry.getKey().get());
+            buffer.appendString(" = ");
+            unparse(buffer, entry.getValue());
+            buffer.appendString("; ");
+        }
+        if (buffer.charAt(buffer.getLength() - 2) == ';') {
+            buffer.setLength(buffer.getLength() - 2);
+        }
+        if (classadIndent > 0) {
+            indentLevel -= classadIndent;
+            buffer.appendChar('\n');
+            int i = 0;
+            while (i < indentLevel) {
+                buffer.appendChar(' ');
+                i++;
+            }
+            buffer.appendChar(']');
+            indentLevel -= classadIndent;
+        } else {
+            buffer.appendString(" ]");
+        }
+    }
+
+    @Override
+    public void unparseAux(AMutableCharArrayString buffer, ExprList exprs) throws HyracksDataException {
+        if (listIndent > 0) {
+            indentLevel += listIndent;
+            buffer.appendChar('\n');
+            int i = 0;
+            while (i < indentLevel) {
+                buffer.appendChar(' ');
+                i++;
+            }
+            buffer.appendChar('{');
+            indentLevel += listIndent;
+        } else {
+            buffer.appendString("{ ");
+        }
+        for (ExprTree itr : exprs.getExprList()) {
+            if (listIndent > 0) {
+                int i = 0;
+                buffer.appendChar('\n');
+                while (i < indentLevel) {
+                    buffer.appendChar(' ');
+                    i++;
+                }
+            }
+            super.unparse(buffer, itr);
+            buffer.appendChar(',');
+        }
+        if (exprs.size() > 0) {
+            buffer.decrementLength();
+        }
+        if (listIndent > 0) {
+            indentLevel -= listIndent;
+            buffer.appendChar('\n');
+            int i = 0;
+            while (i < indentLevel) {
+                buffer.appendChar(' ');
+                i++;
+            }
+            buffer.appendChar('}');
+            indentLevel -= listIndent;
+        } else {
+            buffer.appendString(" }");
+        }
+    }
+
+    /* Checks whether string qualifies to be a non-quoted attribute */
+    public static boolean identifierNeedsQuoting(String str) {
+        boolean needs_quoting;
+        // must start with [a-zA-Z_]
+        if (!Character.isAlphabetic(str.charAt(0)) && str.charAt(0) != '_') {
+            needs_quoting = true;
+        } else {
+
+            // all other characters must be [a-zA-Z0-9_]
+            int i = 1;
+            while (i < str.length() && (Character.isLetterOrDigit(str.charAt(i)) || str.charAt(i) == '_')) {
+                i++;
+            }
+            // needs quoting if we stopped on a special character
+            // before the end of the string.
+            needs_quoting = i < str.length();
+        }
+        return needs_quoting;
+    }
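+
+    /* Illustrative examples for the rule above (not from the original source):
+     * "my_attr1" needs no quoting, while "1st" (leading digit) and "a-b"
+     * (character outside [a-zA-Z0-9_]) both need quoting. */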
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/StringLexerSource.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/StringLexerSource.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/StringLexerSource.java
new file mode 100644
index 0000000..d5c7589
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/StringLexerSource.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+public class StringLexerSource extends LexerSource {
+    private String aString;
+    private int offset;
+
+    public StringLexerSource(String aString, int offset) {
+        setNewSource(aString, offset);
+    }
+
+    public StringLexerSource(String aString) {
+        setNewSource(aString, 0);
+    }
+
+    @Override
+    public int getPosition() {
+        return offset;
+    }
+
+    public void setNewSource(String aString, int offset) {
+        this.aString = aString;
+        this.offset = offset;
+    }
+
+    @Override
+    public char readCharacter() {
+        if (offset == aString.length()) {
+            previousCharacter = Lexer.EOF;
+            return previousCharacter;
+        } else {
+            previousCharacter = aString.charAt(offset);
+            offset++;
+            return previousCharacter;
+        }
+    }
+
+    @Override
+    public void unreadCharacter() {
+        if (offset > 0) {
+            if (previousCharacter != Lexer.EOF) {
+                offset--;
+            }
+        }
+    }
+
+    @Override
+    public boolean atEnd() {
+        return offset == aString.length();
+    }
+
+    public int getCurrentLocation() {
+        return offset;
+    }
+
+    public void setNewSource(String buffer) {
+        setNewSource(buffer, 0);
+    }
+
+    @Override
+    public char[] getBuffer() {
+        return aString.toCharArray();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/TokenValue.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/TokenValue.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/TokenValue.java
new file mode 100644
index 0000000..af86d9b
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/TokenValue.java
@@ -0,0 +1,157 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.asterix.external.classad.Lexer.TokenType;
+import org.apache.asterix.external.classad.Value.NumberFactor;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+
+public class TokenValue {
+    private TokenType tt;
+    private NumberFactor factor;
+    private long intValue;
+    private double realValue;
+    private boolean boolValue;
+    private AMutableCharArrayString strValue;
+    private ClassAdTime time;
+
+    public TokenValue() {
+        tt = TokenType.LEX_TOKEN_ERROR;
+        factor = NumberFactor.NO_FACTOR;
+        intValue = 0;
+        realValue = 0.0;
+        boolValue = false;
+        strValue = new AMutableCharArrayString();
+        time = new ClassAdTime();
+    }
+
+    public void setTokenType(TokenType t) {
+        tt = t;
+    }
+
+    public void setIntValue(long i, NumberFactor f) {
+        intValue = i;
+        factor = f;
+    }
+
+    public void setRealValue(double r, NumberFactor f) {
+        realValue = r;
+        factor = f;
+    }
+
+    public void setBoolValue(boolean b) {
+        boolValue = b;
+    }
+
+    public void setStringValue(char[] str) {
+        strValue.copyValue(str, str.length);
+    }
+
+    public void setStringValue(char[] str, int length) {
+        strValue.copyValue(str, length);
+    }
+
+    public void setStringValue(AMutableCharArrayString aString) {
+        strValue.setValue(aString);
+    }
+
+    public void setAbsTimeValue(ClassAdTime asecs) {
+        time.setValue(asecs);
+    }
+
+    public void setRelTimeValue(double rsecs) {
+        time.setValue((long) rsecs);
+    }
+
+    public TokenType getTokenType() {
+        return tt;
+    }
+
+    public void getIntValue(AMutableInt64 i, AMutableNumberFactor f) {
+        i.setValue(intValue);
+        f.setFactor(factor);
+    }
+
+    public void getRealValue(AMutableDouble r, AMutableNumberFactor f) {
+        r.setValue(realValue);
+        f.setFactor(factor);
+    }
+
+    public void getBoolValue(MutableBoolean b) {
+        b.setValue(boolValue);
+    }
+
+    void getStringValue(AMutableCharArrayString str) {
+        str.copyValue(strValue.getValue(), strValue.getLength());
+    }
+
+    void getAbsTimeValue(ClassAdTime asecs) {
+        asecs.setValue(time);
+    }
+
+    void getRelTimeValue(ClassAdTime rsecs) {
+        rsecs.setValue(time.getRelativeTime());
+    }
+
+    void copyFrom(TokenValue tv) {
+        tt = tv.tt;
+        factor = tv.factor;
+        intValue = tv.intValue;
+        realValue = tv.realValue;
+        boolValue = tv.boolValue;
+        time.setValue(tv.time);
+        strValue.setValue(tv.strValue);
+    }
+
+    public void reset() {
+        tt = TokenType.LEX_TOKEN_ERROR;
+        factor = NumberFactor.NO_FACTOR;
+        intValue = 0;
+        realValue = 0.0;
+        boolValue = false;
+        strValue.reset();
+        time.reset();
+    }
+
+    public NumberFactor getFactor() {
+        return factor;
+    }
+
+    public long getIntValue() {
+        return intValue;
+    }
+
+    public double getRealValue() {
+        return realValue;
+    }
+
+    public boolean getBoolValue() {
+        return boolValue;
+    }
+
+    public AMutableCharArrayString getStrValue() {
+        return strValue;
+    }
+
+    public ClassAdTime getTimeValue() {
+        return time;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Util.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Util.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Util.java
new file mode 100644
index 0000000..cbecb1b
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Util.java
@@ -0,0 +1,262 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.text.DateFormat;
+import java.text.SimpleDateFormat;
+import java.util.Calendar;
+import java.util.Date;
+import java.util.Random;
+import java.util.TimeZone;
+
+import org.apache.asterix.om.base.AMutableInt32;
+
+public class Util {
+    // convert escapes in-place
+    // the string can only shrink while converting escapes so we can safely convert in-place.
+    // needs verification
+    public static boolean convertEscapes(AMutableCharArrayString text) {
+        boolean validStr = true;
+        if (text.getLength() == 0)
+            return true;
+        int length = text.getLength();
+        int dest = 0;
+        for (int source = 0; source < length; ++source) {
+            char ch = text.charAt(source);
+            // scan for escapes, a terminating slash cannot be an escape
+            if (ch == '\\' && source < length - 1) {
+                ++source; // skip the \ character
+                ch = text.charAt(source);
+
+                // The escape part should be re-validated
+                switch (ch) {
+                    case 'b':
+                        ch = '\b';
+                        break;
+                    case 'f':
+                        ch = '\f';
+                        break;
+                    case 'n':
+                        ch = '\n';
+                        break;
+                    case 'r':
+                        ch = '\r';
+                        break;
+                    case 't':
+                        ch = '\t';
+                        break;
+                    case '\\':
+                        ch = '\\';
+                        break;
+                    default:
+                        if (Lexer.isodigit(ch)) {
+                            int number = ch - '0';
+                            // There can be up to 3 octal digits in an octal escape
+                            //  \[0..3]nn or \nn or \n. We quit at 3 characters or
+                            // at the first non-octal character.
+                            if (source + 1 < length) {
+                                char digit = text.charAt(source + 1); // is the next digit also
+                                if (Lexer.isodigit(digit)) {
+                                    ++source;
+                                    number = (number << 3) + digit - '0';
+                                    if (number < 0x20 && source + 1 < length) {
+                                        digit = text.charAt(source + 1);
+                                        if (Lexer.isodigit(digit)) {
+                                            ++source;
+                                            number = (number << 3) + digit - '0';
+                                        }
+                                    }
+                                }
+                            }
+                            ch = (char) number; // use the decoded octal value
+                            if (ch == 0) { // "\\0" is an invalid substring within a string literal
+                                validStr = false;
+                            }
+                        } else {
+                            // pass char after \ unmodified.
+                        }
+                        break;
+                }
+            }
+
+            if (dest == source) {
+                // no need to assign ch to text when we haven't seen any escapes yet.
+                // text[dest] = ch;
+                ++dest;
+            } else {
+                text.erase(dest);
+                text.setChar(dest, ch);
+                ++dest;
+                --source;
+            }
+        }
+
+        if (dest < length) {
+            text.erase(dest);
+            length = dest;
+        }
+        // silly, but to fulfill the original contract for this function
+        // we need to remove the last character in the string if it is a '\0'
+        // (earlier logic guaranteed that a '\0' can ONLY be the last character)
+        if (length > 0 && text.charAt(length - 1) == '\0') {
+            //text.erase(length - 1);
+        }
+        return validStr;
+    }
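+
+    // Illustrative usage of convertEscapes (buffer construction is assumed and not
+    // shown): given a buffer holding the two characters '\' and 'n',
+    //     Util.convertEscapes(buf);
+    // rewrites it in place to a single newline character and returns true.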
+
+    /***************************************************************
+     * Copyright (C) 1990-2007, Condor Team, Computer Sciences Department,
+     * University of Wisconsin-Madison, WI.
+     * Licensed under the Apache License, Version 2.0 (the "License"); you
+     * may not use this file except in compliance with the License. You may
+     * obtain a copy of the License at
+     * http://www.apache.org/licenses/LICENSE-2.0
+     * Unless required by applicable law or agreed to in writing, software
+     * distributed under the License is distributed on an "AS IS" BASIS,
+     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     * See the License for the specific language governing permissions and
+     * limitations under the License.
+     ***************************************************************/
+
+    public static Random initialized = new Random((new Date()).getTime());
+
+    public static int getRandomInteger() {
+        return initialized.nextInt();
+    }
+
+    public static double getRandomReal() {
+        return initialized.nextDouble();
+    }
+
+    public static int timezoneOffset(ClassAdTime clock) {
+        return clock.getOffset();
+    }
+
+    public static void getLocalTime(ClassAdTime now, ClassAdTime localtm) {
+        localtm.setValue(Calendar.getInstance(), now);
+        localtm.isAbsolute(true);
+    }
+
+    public static void absTimeToString(ClassAdTime atime, AMutableCharArrayString buffer) {
+        DateFormat formatter = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss");
+        //"yyyy-MM-dd'T'HH:mm:ss"
+        //2004-01-01T00:00:00+11:00
+        formatter.setTimeZone(TimeZone.getTimeZone("GMT"));
+        buffer.appendString(formatter.format(atime.getCalendar().getTime()));
+        buffer.appendString(
+                (atime.getOffset() >= 0 ? "+" : "-") + String.format("%02d", (Math.abs(atime.getOffset()) / 3600000))
+                        + ":" + String.format("%02d", ((Math.abs(atime.getOffset() / 60) % 60))));
+    }
+
+    public static void relTimeToString(long rsecs, AMutableCharArrayString buffer) {
+        double fractional_seconds;
+        int days, hrs, mins;
+        double secs;
+
+        if (rsecs < 0) {
+            buffer.appendChar('-');
+            rsecs = -rsecs;
+        }
+        fractional_seconds = rsecs % 1000;
+
+        days = (int) (rsecs / 1000);
+        hrs = days % 86400;
+        mins = hrs % 3600;
+        secs = (mins % 60) + (fractional_seconds / 1000.0);
+        days = days / 86400;
+        hrs = hrs / 3600;
+        mins = mins / 60;
+
+        if (days != 0) {
+            if (fractional_seconds == 0) {
+                buffer.appendString(String.format("%d+%02d:%02d:%02d", days, hrs, mins, (int) secs));
+            } else {
+                buffer.appendString(String.format("%d+%02d:%02d:%g", days, hrs, mins, secs));
+            }
+        } else if (hrs != 0) {
+            if (fractional_seconds == 0) {
+                buffer.appendString(String.format("%02d:%02d:%02d", hrs, mins, (int) secs));
+            } else {
+                buffer.appendString(String.format("%02d:%02d:%02g", hrs, mins, secs));
+            }
+        } else if (mins != 0) {
+            if (fractional_seconds == 0) {
+                buffer.appendString(String.format("%02d:%02d", mins, (int) secs));
+            } else {
+                buffer.appendString(String.format("%02d:%02g", mins, secs));
+            }
+            return;
+        } else {
+            if (fractional_seconds == 0) {
+                buffer.appendString(String.format("%02d", (int) secs));
+            } else {
+                buffer.appendString(String.format("%02g", secs));
+            }
+        }
+    }
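+
+    // Worked example (assuming rsecs is in milliseconds, as the division by 1000
+    // above suggests): relTimeToString(93784000L, buf) formats 1 day, 2 hours,
+    // 3 minutes and 4 seconds as "1+02:03:04".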
+
+    public static void dayNumbers(int year, int month, int day, AMutableInt32 weekday, AMutableInt32 yearday) {
+        int fixed = fixedFromGregorian(year, month, day);
+        int jan1_fixed = fixedFromGregorian(year, 1, 1);
+        weekday.setValue(fixed % 7);
+        yearday.setValue(fixed - jan1_fixed);
+        return;
+    }
+
+    public static int fixedFromGregorian(int year, int month, int day) {
+        int fixed;
+        int month_adjustment;
+        if (month <= 2) {
+            month_adjustment = 0;
+        } else if (isLeapYear(year)) {
+            month_adjustment = -1;
+        } else {
+            month_adjustment = -2;
+        }
+        fixed = 365 * (year - 1) + ((year - 1) / 4) - ((year - 1) / 100) + ((year - 1) / 400)
+                + ((367 * month - 362) / 12) + month_adjustment + day;
+        return fixed;
+    }
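+
+    // fixedFromGregorian computes the Rata Die day number (day 1 is Gregorian
+    // 0001-01-01), so for example fixedFromGregorian(1, 1, 1) == 1; dayNumbers
+    // derives the weekday and day-of-year from it.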
+
+    public static boolean isLeapYear(int year) {
+        int mod4;
+        int mod400;
+        boolean leap_year;
+
+        mod4 = year % 4;
+        mod400 = year % 400;
+
+        if (mod4 == 0 && mod400 != 100 && mod400 != 200 && mod400 != 300) {
+            leap_year = true;
+        } else {
+            leap_year = false;
+        }
+        return leap_year;
+    }
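+
+    // Equivalent formulation of the test above: a year is a leap year when it is
+    // divisible by 4 and not by 100, unless it is also divisible by 400
+    // (e.g. 2000 is a leap year, 1900 is not).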
+
+    public static int isInf(double x) {
+        if (Double.isInfinite(x)) {
+            return (x < 0.0) ? (-1) : 1;
+        }
+        return 0;
+    }
+
+    public static boolean isNan(double x) {
+        return Double.isNaN(x);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Value.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Value.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Value.java
new file mode 100644
index 0000000..f0466b2
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Value.java
@@ -0,0 +1,871 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class Value {
+
+    private ValueType valueType;
+    private long longVal;
+    private double doubleVal;
+    private boolean boolVal;
+    private ClassAdTime timeVal = new ClassAdTime();
+    private ClassAd classadVal = new ClassAd();
+    private ExprList listVal = new ExprList();
+    private String stringVal;
+
+    /// Value types
+    public enum ValueType {
+        NULL_VALUE,
+        /** The error value */
+        ERROR_VALUE,
+        /** The undefined value */
+        UNDEFINED_VALUE,
+        /** A boolean value (false, true) */
+        BOOLEAN_VALUE,
+        /** An integer value */
+        INTEGER_VALUE,
+        /** A real value */
+        REAL_VALUE,
+        /** A relative time value */
+        RELATIVE_TIME_VALUE,
+        /** An absolute time value */
+        ABSOLUTE_TIME_VALUE,
+        /** A string value */
+        STRING_VALUE,
+        /** A classad value */
+        CLASSAD_VALUE,
+        /** A list value (not owned here) */
+        LIST_VALUE,
+        /** A list value (owned via shared_ptr) */
+        SLIST_VALUE
+    };
+
+    /// Number factors
+    public enum NumberFactor {
+        /** No factor specified */
+        NO_FACTOR,
+        /** Byte factor */
+        B_FACTOR,
+        /** Kilo factor */
+        K_FACTOR,
+        /** Mega factor */
+        M_FACTOR,
+        /** Giga factor */
+        G_FACTOR,
+        /** Tera factor */
+        T_FACTOR
+    };
+
+    public ValueType getType() {
+        return valueType;
+    }
+
+    public boolean isBooleanValue(MutableBoolean b) {
+        if (valueType == ValueType.BOOLEAN_VALUE) {
+            b.setValue(boolVal);
+            return true;
+        }
+        return false;
+    }
+
+    public boolean isBooleanValue() {
+        return (valueType == ValueType.BOOLEAN_VALUE);
+    }
+
+    public boolean isIntegerValue(AMutableInt64 i) {
+        if (valueType == ValueType.INTEGER_VALUE) {
+            i.setValue(longVal);
+            return true;
+        }
+        return false;
+    }
+
+    public boolean isIntegerValue() {
+        return (valueType == ValueType.INTEGER_VALUE);
+    }
+
+    public boolean isRealValue(AMutableDouble r) {
+
+        if (valueType == ValueType.REAL_VALUE) {
+            r.setValue(doubleVal);
+            return true;
+        }
+        return false;
+    }
+
+    public boolean isRealValue() {
+        return (valueType == ValueType.REAL_VALUE);
+    }
+
+    public boolean isListValue(ExprList el) {
+        if (valueType == ValueType.LIST_VALUE || valueType == ValueType.SLIST_VALUE) {
+            el.getExprList().addAll(listVal.getExprList());
+            el.isShared = listVal.isShared;
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    public boolean isListValue() {
+        return (valueType == ValueType.LIST_VALUE || valueType == ValueType.SLIST_VALUE);
+    }
+
+    public boolean isStringValue() {
+        return (valueType == ValueType.STRING_VALUE);
+    }
+
+    public boolean isStringValue(AMutableCharArrayString s) {
+        if (valueType == ValueType.STRING_VALUE) {
+            s.setValue(stringVal);
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    public boolean isStringValue(AMutableCharArrayString s, int len) {
+        if (valueType == ValueType.STRING_VALUE) {
+            s.setValue(stringVal, len);
+            return (true);
+        }
+        return (false);
+    }
+
+    public boolean isStringValue(AMutableInt32 size) {
+        if (valueType == ValueType.STRING_VALUE) {
+            size.setValue(stringVal.length());
+            return true;
+        } else {
+            size.setValue(-1);
+            return false;
+        }
+    }
+
+    public boolean isClassAdValue(ClassAd ad) {
+        if (valueType == ValueType.CLASSAD_VALUE) {
+            ad.setValue(classadVal);
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    public boolean isClassAdValue() {
+        return (valueType == ValueType.CLASSAD_VALUE);
+    }
+
+    public boolean isUndefinedValue() {
+        return (valueType == ValueType.UNDEFINED_VALUE);
+    }
+
+    public boolean isErrorValue() {
+        return (valueType == ValueType.ERROR_VALUE);
+    }
+
+    public boolean isExceptional() {
+        return (valueType == ValueType.UNDEFINED_VALUE || valueType == ValueType.ERROR_VALUE);
+    }
+
+    public boolean isAbsoluteTimeValue() {
+        return (valueType == ValueType.ABSOLUTE_TIME_VALUE);
+    }
+
+    public boolean isAbsoluteTimeValue(ClassAdTime secs) {
+        if (valueType == ValueType.ABSOLUTE_TIME_VALUE) {
+            secs.setValue(timeVal);
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    public boolean isRelativeTimeValue() {
+        return (valueType == ValueType.RELATIVE_TIME_VALUE);
+    }
+
+    public boolean isRelativeTimeValue(ClassAdTime secs) {
+
+        if (valueType == ValueType.RELATIVE_TIME_VALUE) {
+            secs.setValue(timeVal);
+            return true;
+        }
+        return false;
+    }
+
+    public boolean isNumber() {
+        return (valueType == ValueType.INTEGER_VALUE || valueType == ValueType.REAL_VALUE);
+    }
+
+    public boolean isNumber(AMutableInt64 i) {
+        switch (valueType) {
+            case INTEGER_VALUE:
+                i.setValue(longVal);
+                return true;
+
+            case REAL_VALUE:
+                i.setValue((long) doubleVal);
+                return true;
+
+            case BOOLEAN_VALUE:
+                i.setValue(boolVal ? 1L : 0L);
+                return true;
+            default:
+                return false;
+        }
+    }
+
+    public boolean isNumber(AMutableDouble r) {
+        switch (valueType) {
+            case INTEGER_VALUE:
+                r.setValue(longVal);
+                return true;
+
+            case REAL_VALUE:
+                r.setValue(doubleVal);
+                return true;
+
+            case BOOLEAN_VALUE:
+                r.setValue(boolVal ? 1.0 : 0.0);
+                return true;
+
+            default:
+                return false;
+        }
+    }
+
+    // Implementation
+    public static final double[] ScaleFactor = { 1.0, // none
+            1.0, // B
+            1024.0, // Kilo
+            1024.0 * 1024.0, // Mega
+            1024.0 * 1024.0 * 1024.0, // Giga
+            1024.0 * 1024.0 * 1024.0 * 1024.0 // Tera
+    };
+
+    public Value() {
+        valueType = ValueType.UNDEFINED_VALUE;
+    }
+
+    public Value(Value value) throws HyracksDataException {
+        valueType = value.valueType;
+        switch (value.valueType) {
+            case ABSOLUTE_TIME_VALUE:
+                timeVal = new ClassAdTime(value.timeVal);
+                break;
+            case BOOLEAN_VALUE:
+                this.boolVal = value.boolVal;
+                break;
+            case CLASSAD_VALUE:
+                this.classadVal = new ClassAd(value.classadVal);
+                break;
+            case ERROR_VALUE:
+                break;
+            case INTEGER_VALUE:
+                this.longVal = value.longVal;
+                break;
+            case LIST_VALUE:
+                this.listVal = new ExprList(value.listVal);
+                break;
+            case NULL_VALUE:
+                break;
+            case REAL_VALUE:
+                this.doubleVal = value.doubleVal;
+                break;
+            case RELATIVE_TIME_VALUE:
+                this.timeVal = new ClassAdTime(value.timeVal);
+                break;
+            case SLIST_VALUE:
+                this.listVal = new ExprList(value.listVal);
+                break;
+            case STRING_VALUE:
+                this.stringVal = value.stringVal;
+                break;
+            case UNDEFINED_VALUE:
+                break;
+            default:
+                break;
+        }
+    }
+
+    public void setValue(Value value) throws HyracksDataException {
+        valueType = value.valueType;
+        switch (value.valueType) {
+            case ABSOLUTE_TIME_VALUE:
+                this.timeVal.setValue(value.timeVal);
+                break;
+            case BOOLEAN_VALUE:
+                this.boolVal = value.boolVal;
+                break;
+            case CLASSAD_VALUE:
+                this.classadVal.setValue(value.classadVal);
+                break;
+            case ERROR_VALUE:
+                break;
+            case INTEGER_VALUE:
+                this.longVal = value.longVal;
+                break;
+            case LIST_VALUE:
+                this.listVal.setValue(value.listVal);
+                break;
+            case NULL_VALUE:
+                break;
+            case REAL_VALUE:
+                this.doubleVal = value.doubleVal;
+                break;
+            case RELATIVE_TIME_VALUE:
+                this.timeVal.setValue((value.timeVal));
+                break;
+            case SLIST_VALUE:
+                listVal.setValue(value.listVal);
+                break;
+            case STRING_VALUE:
+                stringVal = value.stringVal;
+                break;
+            case UNDEFINED_VALUE:
+                break;
+            default:
+                break;
+        }
+    }
+
+    public void assign(Value value) throws HyracksDataException {
+        if (this != value) {
+            setValue(value);
+        }
+    }
+
+    public void clear() {
+        valueType = ValueType.UNDEFINED_VALUE;
+    }
+
+    public void setRealValue(double r) {
+        valueType = ValueType.REAL_VALUE;
+        doubleVal = r;
+    }
+
+    public void setRealValue(AMutableDouble r) {
+        valueType = ValueType.REAL_VALUE;
+        doubleVal = r.getDoubleValue();
+    }
+
+    public void setBooleanValue(boolean b) {
+        valueType = ValueType.BOOLEAN_VALUE;
+        boolVal = b;
+    }
+
+    public void setBooleanValue(MutableBoolean b) {
+        valueType = ValueType.BOOLEAN_VALUE;
+        boolVal = b.booleanValue();
+    }
+
+    public void setIntegerValue(long i) {
+        valueType = ValueType.INTEGER_VALUE;
+        longVal = i;
+    }
+
+    public void setUndefinedValue() {
+        valueType = ValueType.UNDEFINED_VALUE;
+    }
+
+    public void setErrorValue() {
+        valueType = ValueType.ERROR_VALUE;
+    }
+
+    public void setStringValue(AMutableCharArrayString s) {
+        valueType = ValueType.STRING_VALUE;
+        stringVal = s.toString();
+    }
+
+    public void setStringValue(String s) {
+        valueType = ValueType.STRING_VALUE;
+        stringVal = s;
+    }
+
+    public void setListValue(ExprList expList) throws HyracksDataException {
+        valueType = expList.isShared ? ValueType.SLIST_VALUE : ValueType.LIST_VALUE;
+        listVal.setValue(expList);
+    }
+
+    public void setClassAdValue(ClassAd ad) {
+        clear();
+        valueType = ValueType.CLASSAD_VALUE;
+        classadVal.setValue(ad);
+    }
+
+    public void setRelativeTimeValue(ClassAdTime rsecs) {
+        clear();
+        valueType = ValueType.RELATIVE_TIME_VALUE;
+        timeVal.setValue(rsecs);
+    }
+
+    public void setRelativeTimeValue(long rsecs) {
+        clear();
+        valueType = ValueType.RELATIVE_TIME_VALUE;
+        timeVal.setValue(rsecs);
+        timeVal.isAbsolute(false);
+    }
+
+    public void setAbsoluteTimeValue(ClassAdTime tim) {
+        clear();
+        valueType = ValueType.ABSOLUTE_TIME_VALUE;
+        timeVal.setValue(tim);
+    }
+
+    public boolean sameAs(Value otherValue) {
+        boolean is_same = false;
+        if (valueType != otherValue.valueType) {
+            is_same = false;
+        } else {
+            switch (valueType) {
+                case NULL_VALUE:
+                case ERROR_VALUE:
+                case UNDEFINED_VALUE:
+                    is_same = true;
+                    break;
+                case BOOLEAN_VALUE:
+                    is_same = (boolVal == otherValue.boolVal);
+                    break;
+                case INTEGER_VALUE:
+                    is_same = (longVal == otherValue.longVal);
+                    break;
+                case REAL_VALUE:
+                    is_same = (doubleVal == otherValue.doubleVal);
+                    break;
+                case LIST_VALUE:
+                case SLIST_VALUE:
+                    is_same = listVal.equals(otherValue.listVal);
+                    break;
+                case CLASSAD_VALUE:
+                    is_same = classadVal.equals(otherValue.classadVal);
+                    break;
+                case RELATIVE_TIME_VALUE:
+                case ABSOLUTE_TIME_VALUE:
+                    is_same = timeVal.equals(otherValue.timeVal);
+                    break;
+                case STRING_VALUE:
+                    is_same = stringVal.equals(otherValue.stringVal);
+                    break;
+            }
+        }
+        return is_same;
+    }
+
+    public boolean equals(Value value) {
+        return sameAs(value);
+    }
+
+    public boolean isBooleanValueEquiv(MutableBoolean b) {
+        return isBooleanValue(b);
+    }
+
+    @Override
+    public String toString() {
+        ClassAdUnParser unparser = new PrettyPrint();
+        AMutableCharArrayString unparsed_text = new AMutableCharArrayString();
+        switch (valueType) {
+            case ABSOLUTE_TIME_VALUE:
+            case CLASSAD_VALUE:
+            case RELATIVE_TIME_VALUE:
+            case SLIST_VALUE:
+            case LIST_VALUE:
+                try {
+                    unparser.unparse(unparsed_text, this);
+                } catch (HyracksDataException e) {
+                    e.printStackTrace();
+                }
+                return unparsed_text.toString();
+            case BOOLEAN_VALUE:
+                if (boolVal) {
+                    return "true";
+                } else {
+                    return "false";
+                }
+            case ERROR_VALUE:
+                return "error";
+            case INTEGER_VALUE:
+                return String.valueOf(longVal);
+            case NULL_VALUE:
+                return "(null)";
+            case REAL_VALUE:
+                return String.valueOf(doubleVal);
+            case STRING_VALUE:
+                return stringVal;
+            case UNDEFINED_VALUE:
+                return "undefined";
+            default:
+                break;
+        }
+        return null;
+    }
+
+    public static boolean convertValueToRealValue(Value value, Value realValue) throws HyracksDataException {
+        boolean could_convert;
+        AMutableCharArrayString buf = new AMutableCharArrayString();
+        int endIndex;
+        char end;
+        AMutableInt64 ivalue = new AMutableInt64(0);
+        ClassAdTime atvalue = new ClassAdTime();
+        MutableBoolean bvalue = new MutableBoolean();
+        double rvalue;
+        NumberFactor nf = NumberFactor.NO_FACTOR;
+
+        switch (value.getType()) {
+            case UNDEFINED_VALUE:
+                realValue.setUndefinedValue();
+                could_convert = false;
+                break;
+
+            case ERROR_VALUE:
+            case CLASSAD_VALUE:
+            case LIST_VALUE:
+            case SLIST_VALUE:
+                realValue.setErrorValue();
+                could_convert = false;
+                break;
+
+            case STRING_VALUE:
+                could_convert = true;
+                value.isStringValue(buf);
+                endIndex = buf.fistNonDoubleDigitChar();
+                if (endIndex < 0) {
+                    // no non-digit character: parse the whole string as a double
+                    String buffString = buf.toString();
+                    if (buffString.contains("INF")) {
+                        buffString = buffString.replace("INF", "Infinity");
+                    }
+                    rvalue = Double.parseDouble(buffString);
+                    nf = NumberFactor.NO_FACTOR;
+                } else {
+                    rvalue = Double.parseDouble(buf.substr(0, endIndex));
+                    end = buf.charAt(endIndex);
+                    switch (Character.toUpperCase(end)) {
+                        case 'B':
+                            nf = NumberFactor.B_FACTOR;
+                            break;
+                        case 'K':
+                            nf = NumberFactor.K_FACTOR;
+                            break;
+                        case 'M':
+                            nf = NumberFactor.M_FACTOR;
+                            break;
+                        case 'G':
+                            nf = NumberFactor.G_FACTOR;
+                            break;
+                        case 'T':
+                            nf = NumberFactor.T_FACTOR;
+                            break;
+                        case '\0':
+                            nf = NumberFactor.NO_FACTOR;
+                            break;
+                        default:
+                            nf = NumberFactor.NO_FACTOR;
+                            break;
+                    }
+                }
+
+                if (could_convert) {
+                    realValue.setRealValue(rvalue * Value.ScaleFactor[nf.ordinal()]);
+                }
+                break;
+
+            case BOOLEAN_VALUE:
+                value.isBooleanValue(bvalue);
+                realValue.setRealValue(bvalue.booleanValue() ? 1.0 : 0.0);
+                could_convert = true;
+                break;
+
+            case INTEGER_VALUE:
+                value.isIntegerValue(ivalue);
+                realValue.setRealValue(ivalue.getLongValue());
+                could_convert = true;
+                break;
+
+            case REAL_VALUE:
+                realValue.copyFrom(value);
+                could_convert = true;
+                break;
+
+            case ABSOLUTE_TIME_VALUE:
+                value.isAbsoluteTimeValue(atvalue);
+                realValue.setRealValue(atvalue.getTimeInMillis() / 1000.0);
+                could_convert = true;
+                break;
+
+            case RELATIVE_TIME_VALUE:
+                value.isRelativeTimeValue(atvalue);
+                realValue.setRealValue(atvalue.getRelativeTime() / 1000.0);
+                could_convert = true;
+                break;
+
+            default:
+                could_convert = false; // Make gcc's -Wuninitialized happy
+                throw new HyracksDataException("Should not reach here");
+        }
+        return could_convert;
+    }
+
+    public static boolean convertValueToIntegerValue(Value value, Value integerValue) throws HyracksDataException {
+        boolean could_convert;
+        AMutableCharArrayString buf = new AMutableCharArrayString();
+        char end;
+        AMutableInt64 ivalue = new AMutableInt64(0);
+        AMutableDouble rtvalue = new AMutableDouble(0);
+        ClassAdTime atvalue = new ClassAdTime();
+        MutableBoolean bvalue = new MutableBoolean();
+        NumberFactor nf;
+
+        switch (value.getType()) {
+            case UNDEFINED_VALUE:
+                integerValue.setUndefinedValue();
+                could_convert = false;
+                break;
+
+            case ERROR_VALUE:
+            case CLASSAD_VALUE:
+            case LIST_VALUE:
+            case SLIST_VALUE:
+                integerValue.setErrorValue();
+                could_convert = false;
+                break;
+
+            case STRING_VALUE:
+                could_convert = true;
+                value.isStringValue(buf);
+                int endIndex = buf.firstNonDigitChar();
+                if (endIndex < 0) {
+                    // no non-digit character: the whole string is the integer value
+                    ivalue.setValue(Long.parseLong(buf.toString()));
+                    nf = NumberFactor.NO_FACTOR;
+                    integerValue.setIntegerValue(ivalue.getLongValue());
+                    break;
+                } else {
+                    ivalue.setValue(Long.parseLong(buf.substr(0, endIndex)));
+                    end = buf.charAt(endIndex);
+                    switch (Character.toUpperCase(end)) {
+                        case 'B':
+                            nf = NumberFactor.B_FACTOR;
+                            break;
+                        case 'K':
+                            nf = NumberFactor.K_FACTOR;
+                            break;
+                        case 'M':
+                            nf = NumberFactor.M_FACTOR;
+                            break;
+                        case 'G':
+                            nf = NumberFactor.G_FACTOR;
+                            break;
+                        case 'T':
+                            nf = NumberFactor.T_FACTOR;
+                            break;
+                        case '\0':
+                            nf = NumberFactor.NO_FACTOR;
+                            break;
+                        default:
+                            nf = NumberFactor.NO_FACTOR;
+                            break;
+                    }
+                    if (could_convert) {
+                        integerValue
+                                .setIntegerValue((long) ((ivalue.getLongValue() * Value.ScaleFactor[nf.ordinal()])));
+                    }
+                }
+                break;
+
+            case BOOLEAN_VALUE:
+                value.isBooleanValue(bvalue);
+                integerValue.setIntegerValue(bvalue.booleanValue() ? 1 : 0);
+                could_convert = true;
+                break;
+
+            case INTEGER_VALUE:
+                integerValue.copyFrom(value);
+                could_convert = true;
+                break;
+
+            case REAL_VALUE:
+                value.isRealValue(rtvalue);
+                integerValue.setIntegerValue((long) rtvalue.getDoubleValue());
+                could_convert = true;
+                break;
+
+            case ABSOLUTE_TIME_VALUE:
+                value.isAbsoluteTimeValue(atvalue);
+                integerValue.setIntegerValue(atvalue.getTimeInMillis() / 1000L);
+                could_convert = true;
+                break;
+            case RELATIVE_TIME_VALUE:
+                value.isRelativeTimeValue(atvalue);
+                integerValue.setIntegerValue((atvalue.getTime() / 1000L));
+                could_convert = true;
+                break;
+
+            default:
+                could_convert = false; // Make gcc's -Wuninitialized happy
+                throw new HyracksDataException("Should not reach here");
+        }
+        return could_convert;
+    }
+
+    public void copyFrom(Value val) throws HyracksDataException {
+        clear();
+        valueType = val.valueType;
+        switch (val.valueType) {
+            case STRING_VALUE:
+                stringVal = val.stringVal;
+                return;
+
+            case BOOLEAN_VALUE:
+                boolVal = val.boolVal;
+                return;
+
+            case INTEGER_VALUE:
+                longVal = val.longVal;
+                return;
+
+            case REAL_VALUE:
+                doubleVal = val.doubleVal;
+                return;
+            case UNDEFINED_VALUE:
+            case ERROR_VALUE:
+                return;
+            case LIST_VALUE:
+            case SLIST_VALUE:
+                listVal.copyFrom(val.listVal);
+                return;
+            case CLASSAD_VALUE:
+                classadVal.copyFrom(val.classadVal);
+                return;
+
+            case RELATIVE_TIME_VALUE:
+            case ABSOLUTE_TIME_VALUE:
+                timeVal.setValue(val.timeVal);
+                return;
+            default:
+                setUndefinedValue();
+        }
+    }
+
+    public static boolean convertValueToStringValue(Value value, Value stringValue) throws HyracksDataException {
+        boolean could_convert = false;
+        ClassAdTime atvalue = new ClassAdTime();
+        AMutableCharArrayString string_representation = new AMutableCharArrayString();
+        ClassAdUnParser unparser = new PrettyPrint();
+
+        switch (value.getType()) {
+            case UNDEFINED_VALUE:
+                stringValue.setUndefinedValue();
+                could_convert = false;
+                break;
+
+            case ERROR_VALUE:
+                stringValue.setErrorValue();
+                could_convert = false;
+                break;
+
+            case STRING_VALUE:
+                stringValue.copyFrom(value);
+                could_convert = true;
+                break;
+
+            case CLASSAD_VALUE:
+            case LIST_VALUE:
+            case SLIST_VALUE:
+            case BOOLEAN_VALUE:
+            case INTEGER_VALUE:
+            case REAL_VALUE:
+                unparser.unparse(string_representation, value);
+                stringValue.setStringValue(string_representation);
+                could_convert = true;
+                break;
+
+            case ABSOLUTE_TIME_VALUE:
+                value.isAbsoluteTimeValue(atvalue);
+                Util.absTimeToString(atvalue, string_representation);
+                stringValue.setStringValue(string_representation);
+                could_convert = true;
+                break;
+
+            case RELATIVE_TIME_VALUE:
+                value.isRelativeTimeValue(atvalue);
+                Util.relTimeToString(atvalue.getTimeInMillis(), string_representation);
+                stringValue.setStringValue(string_representation);
+                could_convert = true;
+                break;
+
+            default:
+                could_convert = false; // Make gcc's -Wuninitialized happy
+                throw new HyracksDataException("Should not reach here");
+        }
+        return could_convert;
+    }
+
+    public boolean isSListValue(ExprList l) throws HyracksDataException {
+        if (valueType == ValueType.SLIST_VALUE || valueType == ValueType.LIST_VALUE) {
+            l.setValue(listVal);
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    public ValueType getValueType() {
+        return valueType;
+    }
+
+    public long getLongVal() {
+        return longVal;
+    }
+
+    public double getDoubleVal() {
+        return doubleVal;
+    }
+
+    public boolean getBoolVal() {
+        return boolVal;
+    }
+
+    public ClassAdTime getTimeVal() {
+        return timeVal;
+    }
+
+    public ClassAd getClassadVal() {
+        return classadVal;
+    }
+
+    public ExprList getListVal() {
+        return listVal;
+    }
+
+    public String getStringVal() {
+        return stringVal;
+    }
+
+    public static double[] getScalefactor() {
+        return ScaleFactor;
+    }
+
+}
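
A quick illustration of the suffix-scaling path above: convertValueToRealValue splits a string value at its first non-numeric character, parses the numeric prefix, and multiplies it by the matching ScaleFactor entry. A minimal sketch, where the input "2K" and the expected 2048.0 are illustrative and the call throws HyracksDataException:

    Value in = new Value();
    in.setStringValue("2K");
    Value out = new Value();
    boolean ok = Value.convertValueToRealValue(in, out);
    // ok == true; out.getDoubleVal() == 2.0 * 1024.0 == 2048.0 (K_FACTOR)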

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/AttributeReferencePool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/AttributeReferencePool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/AttributeReferencePool.java
new file mode 100644
index 0000000..d7bcfcc
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/AttributeReferencePool.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.AttributeReference;
+
+public class AttributeReferencePool extends Pool<AttributeReference> {
+
+    @Override
+    public AttributeReference newInstance() {
+        return new AttributeReference();
+    }
+
+    @Override
+    protected void reset(AttributeReference obj) {
+        obj.reset();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/BitSetPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/BitSetPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/BitSetPool.java
new file mode 100644
index 0000000..b6f110d
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/BitSetPool.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import java.util.BitSet;
+
+public class BitSetPool extends Pool<BitSet> {
+
+    @Override
+    public BitSet newInstance() {
+        return new BitSet();
+    }
+
+    @Override
+    protected void reset(BitSet obj) {
+        obj.clear();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CaseInsensitiveStringPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CaseInsensitiveStringPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CaseInsensitiveStringPool.java
new file mode 100644
index 0000000..7e88961
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CaseInsensitiveStringPool.java
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import java.util.Stack;
+
+import org.apache.asterix.external.classad.CaseInsensitiveString;
+
+public class CaseInsensitiveStringPool extends Pool<CaseInsensitiveString> {
+
+    protected Stack<CaseInsensitiveString> stock = new Stack<CaseInsensitiveString>();
+
+    @Override
+    public CaseInsensitiveString get() {
+        if (!stock.isEmpty()) {
+            return stock.pop();
+        } else {
+            return newInstance();
+
+        }
+    }
+
+    @Override
+    public void reset() {
+    }
+
+    public void put(CaseInsensitiveString aString) {
+        stock.push(aString);
+    }
+
+    @Override
+    public CaseInsensitiveString newInstance() {
+        return new CaseInsensitiveString();
+    }
+
+    @Override
+    protected void reset(CaseInsensitiveString obj) {
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CharArrayStringPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CharArrayStringPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CharArrayStringPool.java
new file mode 100644
index 0000000..6fe1d16
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/CharArrayStringPool.java
@@ -0,0 +1,34 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.AMutableCharArrayString;
+
+public class CharArrayStringPool extends Pool<AMutableCharArrayString> {
+
+    @Override
+    public AMutableCharArrayString newInstance() {
+        return new AMutableCharArrayString(32);
+    }
+
+    @Override
+    protected void reset(AMutableCharArrayString obj) {
+        obj.reset();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ClassAdPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ClassAdPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ClassAdPool.java
new file mode 100644
index 0000000..40b8eb3
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ClassAdPool.java
@@ -0,0 +1,36 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.ClassAd;
+
+public class ClassAdPool extends Pool<ClassAd> {
+
+    @Override
+    public ClassAd newInstance() {
+        return new ClassAd(false, false);
+    }
+
+    @Override
+    protected void reset(ClassAd obj) {
+        // Possibly the expressions stored inside the ClassAd should be returned to their own pools here.
+        obj.reset();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprHolderPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprHolderPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprHolderPool.java
new file mode 100644
index 0000000..b34e863
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprHolderPool.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.ExprTreeHolder;
+
+public class ExprHolderPool extends Pool<ExprTreeHolder> {
+    @Override
+    public ExprTreeHolder newInstance() {
+        return new ExprTreeHolder();
+    }
+
+    @Override
+    protected void reset(ExprTreeHolder obj) {
+        obj.reset();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprListPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprListPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprListPool.java
new file mode 100644
index 0000000..1f2958a
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ExprListPool.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.ExprList;
+
+public class ExprListPool extends Pool<ExprList> {
+
+    @Override
+    public ExprList newInstance() {
+        return new ExprList();
+    }
+
+    @Override
+    protected void reset(ExprList obj) {
+        obj.reset();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/LiteralPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/LiteralPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/LiteralPool.java
new file mode 100644
index 0000000..23d8dae
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/LiteralPool.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.Literal;
+
+public class LiteralPool extends Pool<Literal> {
+    @Override
+    public Literal newInstance() {
+        return new Literal();
+    }
+
+    @Override
+    protected void reset(Literal obj) {
+        obj.reset();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/OperationPool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/OperationPool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/OperationPool.java
new file mode 100644
index 0000000..062d870
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/OperationPool.java
@@ -0,0 +1,35 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.Operation;
+
+public class OperationPool extends Pool<Operation> {
+
+    @Override
+    public Operation newInstance() {
+        return new Operation();
+    }
+
+    @Override
+    protected void reset(Operation obj) {
+        obj.reset();
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/Pool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/Pool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/Pool.java
new file mode 100644
index 0000000..6a39596
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/Pool.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import java.util.ArrayList;
+import java.util.List;
+
+public abstract class Pool<T> {
+    protected List<T> inUse = new ArrayList<T>();
+    protected int pointer = 0;
+
+    public T get() {
+        if (pointer >= inUse.size()) {
+            inUse.add(newInstance());
+        }
+        T t = inUse.get(pointer);
+        pointer++;
+        reset(t);
+        return t;
+    }
+
+    public abstract T newInstance();
+
+    public void reset() {
+        pointer = 0;
+    }
+
+    protected abstract void reset(T obj);
+}
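
The Pool base class above implements a rewind-and-reuse scheme: get() hands out (and resets) instances from an internal list, while reset() rewinds the pointer so the same instances are recycled on the next pass (CaseInsensitiveStringPool instead overrides get() with an explicit push/pop stack). A minimal usage sketch with the BitSetPool shown earlier, variable names illustrative:

    BitSetPool pool = new BitSetPool();
    BitSet first = pool.get();   // newInstance() on first use, then reset(obj) -> clear()
    first.set(3);
    pool.reset();                // rewind: everything handed out so far becomes reusable
    BitSet again = pool.get();   // same instance as 'first', cleared again on the way out
    // first == again, and again.isEmpty() is true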

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/TokenValuePool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/TokenValuePool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/TokenValuePool.java
new file mode 100644
index 0000000..a5328ee
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/TokenValuePool.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.TokenValue;
+
+public class TokenValuePool extends Pool<TokenValue> {
+    @Override
+    public TokenValue newInstance() {
+        return new TokenValue();
+    }
+
+    @Override
+    protected void reset(TokenValue obj) {
+        obj.reset();
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ValuePool.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ValuePool.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ValuePool.java
new file mode 100644
index 0000000..5034dbe
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/object/pool/ValuePool.java
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.object.pool;
+
+import org.apache.asterix.external.classad.Value;
+
+public class ValuePool extends Pool<Value> {
+    @Override
+    public Value newInstance() {
+        return new Value();
+    }
+
+    @Override
+    protected void reset(Value obj) {
+        obj.clear();
+    }
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdFunctionalTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdFunctionalTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdFunctionalTest.java
new file mode 100644
index 0000000..2f23684
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdFunctionalTest.java
@@ -0,0 +1,57 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.test;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+public class ClassAdFunctionalTest extends TestCase {
+    /**
+     * Create the test case
+     *
+     * @param testName
+     *            name of the test case
+     */
+    public ClassAdFunctionalTest(String testName) {
+        super(testName);
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite() {
+        return new TestSuite(ClassAdFunctionalTest.class);
+    }
+
+    /**
+     * Rigorous Test :-)
+     */
+    public void testApp() {
+
+        String[] args = { "", "-d", "-v", getClass().getResource("/functional_tests.txt").getPath() };
+        try {
+            FunctionalTester.test(args.length, args);
+        } catch (Exception e) {
+            e.printStackTrace();
+            assertTrue(false);
+        }
+        assertTrue(true);
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
new file mode 100644
index 0000000..b0e0925
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdParserTest.java
@@ -0,0 +1,82 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.test;
+
+import java.io.BufferedReader;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+
+import org.apache.asterix.external.classad.ClassAd;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+public class ClassAdParserTest extends TestCase {
+    /**
+     * Create the test case
+     *
+     * @param testName
+     *            name of the test case
+     */
+    public ClassAdParserTest(String testName) {
+        super(testName);
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite() {
+        return new TestSuite(ClassAdParserTest.class);
+    }
+
+    /**
+     *
+     */
+    public void testApp() {
+        try {
+            // test here
+            ClassAd pAd = new ClassAd();
+            String szInput;
+            String[] files = new String[] { "/testdata.txt" };
+            BufferedReader infile = null;
+            for (String path : files) {
+                infile = Files.newBufferedReader(Paths.get(getClass().getResource(path).getPath()),
+                        StandardCharsets.UTF_8);
+                szInput = infile.readLine();
+                while (szInput != null) {
+                    if (szInput.trim().length() == 0) {
+                        // ClassAdChain completed
+                        pAd.clear();
+                    } else if (!pAd.insert(szInput)) {
+                        // Problem
+                        System.out.println("BARFED ON:" + szInput);
+                        assert (false);
+                    }
+                    szInput = infile.readLine();
+                }
+                infile.close();
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            assertTrue(false);
+        }
+        assertTrue(true);
+    }
+}
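
The test above drives the parser one attribute line at a time from testdata.txt, starting a new ClassAd at every blank line. A minimal sketch of the same insert path on two illustrative attribute lines, with exceptions handled as in the test:

    ClassAd ad = new ClassAd();
    ad.insert("Memory = 1012");
    ad.insert("OpSys = \"LINUX\"");
    // a blank line in the data file corresponds to ad.clear() and a fresh record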


[02/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/resources/testdata.txt
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/testdata.txt b/asterix-external-data/src/test/resources/testdata.txt
new file mode 100644
index 0000000..db20559
--- /dev/null
+++ b/asterix-external-data/src/test/resources/testdata.txt
@@ -0,0 +1,684032 @@
+Machine = "glow-c005.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391693
+UpdateSequenceNumber = 1447
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223717.78"
+PublicClaimId = "<128.105.245.5:35840>#1358047163#123#..."
+TotalTimeMatchedIdle = 8
+HasMPI = true
+TotalClaimRunTime = 7109
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c005"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 23
+Name = "slot1@glow-c005.cs.wisc.edu"
+ImageSize = 108228
+NumPids = 1
+MonitorSelfTime = 1358391570
+TimeToLive = 2147483647
+KeyboardIdle = 345726
+LastBenchmark = 1358374272
+TotalDisk = 75664548
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374272
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1445
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095821
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 7279
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 337084
+MonitorSelfImageSize = 9400.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391441
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 94
+TotalMemory = 2025
+DaemonStartTime = 1358047163
+EnteredCurrentActivity = 1358390676
+MyAddress = "<128.105.245.5:35840>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223717.78#1358389443"
+HasJava = true
+EnteredCurrentState = 1358384583
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+MyCurrentTime = 1358391693
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 36
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1261
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x48000000000a051140002aaaaaa81400"
+KFlops = 1089447
+UpdatesSequenced = 1443
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4656
+Arch = "INTEL"
+Mips = 2522
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391441
+HasTDP = true
+ConsoleIdle = 345726
+SubnetMask = "255.255.254.0"
+UpdatesLost = 254
+TotalJobRunTime = 1017
+StartdIpAddr = "<128.105.245.5:35840>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.060000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 7
+HibernationState = "NONE"
+JavaMFlops = 310.359924
+MonitorSelfAge = 344408
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390676
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37832274
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c005.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:ba"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.060000
+
+Machine = "glow-c005.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391694
+UpdateSequenceNumber = 1435
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223679.4"
+PublicClaimId = "<128.105.245.5:35840>#1358047163#125#..."
+TotalTimeMatchedIdle = 2
+HasMPI = true
+TotalClaimRunTime = 7087
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c005"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 10
+Name = "slot2@glow-c005.cs.wisc.edu"
+ImageSize = 115004
+NumPids = 1
+MonitorSelfTime = 1358391570
+TimeToLive = 2147483647
+KeyboardIdle = 345726
+LastBenchmark = 1358374272
+TotalDisk = 75664548
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374272
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1436
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095821
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 8211
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 336211
+MonitorSelfImageSize = 9400.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391429
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 82
+TotalMemory = 2025
+DaemonStartTime = 1358047163
+EnteredCurrentActivity = 1358384607
+MyAddress = "<128.105.245.5:35840>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223679.4#1358384461"
+HasJava = true
+EnteredCurrentState = 1358384601
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+MyCurrentTime = 1358391694
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 9
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1261
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00200000080000020000000000000000"
+KFlops = 1089447
+UpdatesSequenced = 1435
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4656
+Arch = "INTEL"
+Mips = 2522
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391429
+HasTDP = true
+ConsoleIdle = 345726
+SubnetMask = "255.255.254.0"
+UpdatesLost = 25
+TotalJobRunTime = 7087
+StartdIpAddr = "<128.105.245.5:35840>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.060000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 7
+HibernationState = "NONE"
+JavaMFlops = 310.359924
+MonitorSelfAge = 344408
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358384607
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37832274
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c005.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:ba"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.060000
+
+Machine = "glow-c015.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391469
+UpdateSequenceNumber = 1190
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_33"
+OSG_major = 3
+JobId = "1223684.3"
+PublicClaimId = "<128.105.245.15:56069>#1358102086#81#..."
+TotalTimeMatchedIdle = 4
+HasMPI = true
+TotalClaimRunTime = 5157
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c015"
+JavaVendor = "Sun Microsystems Inc."
+HasAFS_OSG = true
+TotalTimePreemptingVacating = 5
+Name = "slot1@glow-c015.cs.wisc.edu"
+ImageSize = 115048
+NumPids = 1
+MonitorSelfTime = 1358391300
+TimeToLive = 2147483647
+KeyboardIdle = 290579
+LastBenchmark = 1358379296
+TotalDisk = 75658512
+OSglibc_minor = 5
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358379296
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1187
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091578
+HasAFS_Atlas = true
+ClockDay = 3
+IsWakeOnLanEnabled = true
+HasCVMFS_CMS = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 6581
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasAFS = true
+AFSCacheUsed = 132
+HasIOProxy = true
+TotalTimeClaimedBusy = 282700
+MonitorSelfImageSize = 9724.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391303
+OSIssue = "Scientific Linux release 5.8 (Boron)"
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 46
+OSG_micro = 8
+TotalMemory = 2025
+DaemonStartTime = 1358102086
+EnteredCurrentActivity = 1358386312
+MyAddress = "<128.105.245.15:56069>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223684.3#1358384463"
+AFS_SYSNAMES = "'i386_linux26'"
+HasJava = true
+EnteredCurrentState = 1358386312
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+OSglibc_major = 2
+MyCurrentTime = 1358391469
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 38
+AFSCacheAvail = 100000
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1257
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+HasCVMFS_Atlas = true
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x55554a80000000355400005555554001"
+KFlops = 1042629
+UpdatesSequenced = 1186
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4708
+AFS_SYSNAME = "i386_linux26"
+Arch = "INTEL"
+Mips = 2435
+Activity = "Busy"
+OSKernelRelease = "2.6.18-308.13.1.el5"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+OSlibc6 = "libc-2.5.so"
+LastFetchWorkCompleted = 1358391304
+HasTDP = true
+ConsoleIdle = 290579
+SubnetMask = "255.255.254.0"
+UpdatesLost = 208
+OSRedHatRelease = "Scientific Linux release 5.8 (Boron)"
+TotalJobRunTime = 5157
+StartdIpAddr = "<128.105.245.15:56069>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+OSG_minor = 1
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.120000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 5
+HibernationState = "NONE"
+JavaMFlops = 310.838867
+MonitorSelfAge = 289215
+LoadAvg = 1.060000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+OSglibc_micro = 0
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358386312
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37829256
+VirtualMemory = 2124538
+TotalVirtualMemory = 4249076
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c015.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:43"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.060000
+TotalCondorLoadAvg = 2.120000
+
+Machine = "glow-c015.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391773
+UpdateSequenceNumber = 1174
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_33"
+OSG_major = 3
+JobId = "1223718.77"
+PublicClaimId = "<128.105.245.15:56069>#1358102086#78#..."
+TotalTimeMatchedIdle = 3
+HasMPI = true
+TotalClaimRunTime = 7167
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c015"
+JavaVendor = "Sun Microsystems Inc."
+HasAFS_OSG = true
+TotalTimePreemptingVacating = 8
+Name = "slot2@glow-c015.cs.wisc.edu"
+ImageSize = 108056
+NumPids = 1
+MonitorSelfTime = 1358391540
+TimeToLive = 2147483647
+KeyboardIdle = 290884
+LastBenchmark = 1358379296
+TotalDisk = 75658512
+OSglibc_minor = 5
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358379296
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1175
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091913
+HasAFS_Atlas = true
+ClockDay = 3
+IsWakeOnLanEnabled = true
+HasCVMFS_CMS = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 5074
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasAFS = true
+AFSCacheUsed = 132
+HasIOProxy = true
+TotalTimeClaimedBusy = 284532
+MonitorSelfImageSize = 9724.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391549
+OSIssue = "Scientific Linux release 5.8 (Boron)"
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 54
+OSG_micro = 8
+TotalMemory = 2025
+DaemonStartTime = 1358102086
+EnteredCurrentActivity = 1358390882
+MyAddress = "<128.105.245.15:56069>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223718.77#1358389444"
+AFS_SYSNAMES = "'i386_linux26'"
+HasJava = true
+EnteredCurrentState = 1358384600
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+OSglibc_major = 2
+MyCurrentTime = 1358391773
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 7
+AFSCacheAvail = 100000
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+HasCVMFS_Atlas = true
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000000000000000000000"
+KFlops = 1042629
+UpdatesSequenced = 1174
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4708
+AFS_SYSNAME = "i386_linux26"
+Arch = "INTEL"
+Mips = 2435
+Activity = "Busy"
+OSKernelRelease = "2.6.18-308.13.1.el5"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+OSlibc6 = "libc-2.5.so"
+LastFetchWorkCompleted = 1358391549
+HasTDP = true
+ConsoleIdle = 290884
+SubnetMask = "255.255.254.0"
+UpdatesLost = 27
+OSRedHatRelease = "Scientific Linux release 5.8 (Boron)"
+TotalJobRunTime = 891
+StartdIpAddr = "<128.105.245.15:56069>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+OSG_minor = 1
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.060000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 5
+HibernationState = "NONE"
+JavaMFlops = 310.838867
+MonitorSelfAge = 289455
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+OSglibc_micro = 0
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390882
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37829256
+VirtualMemory = 2124538
+TotalVirtualMemory = 4249076
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c015.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:43"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.060000
+
+Machine = "glow-c070.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391832
+UpdateSequenceNumber = 1217
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223679.3"
+PublicClaimId = "<128.105.245.70:39810>#1358102709#113#..."
+TotalTimeMatchedIdle = 10
+HasMPI = true
+TotalClaimRunTime = 7232
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c070"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 7
+Name = "slot1@glow-c070.cs.wisc.edu"
+ImageSize = 115536
+NumPids = 1
+MonitorSelfTime = 1358391678
+TimeToLive = 2147483647
+KeyboardIdle = 290322
+LastBenchmark = 1358373471
+TotalDisk = 72254488
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358373471
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1833
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.096063
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 6805
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 282154
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391831
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 102
+TotalMemory = 3862
+DaemonStartTime = 1358102709
+EnteredCurrentActivity = 1358384600
+MyAddress = "<128.105.245.70:39810>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223679.3#1358384461"
+HasJava = true
+EnteredCurrentState = 1358384596
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391832
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 38
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1263
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00555505040000002554000000000aaa"
+KFlops = 1057312
+UpdatesSequenced = 1834
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3804
+Arch = "INTEL"
+Mips = 2558
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391831
+HasTDP = true
+ConsoleIdle = 290322
+SubnetMask = "255.255.254.0"
+UpdatesLost = 236
+TotalJobRunTime = 7232
+StartdIpAddr = "<128.105.245.70:39810>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.070000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.665710
+MonitorSelfAge = 288969
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358384600
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127244
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c070.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b3:f4"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.070000
+
+Machine = "glow-c070.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391833
+UpdateSequenceNumber = 1243
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223709.73"
+PublicClaimId = "<128.105.245.70:39810>#1358102709#112#..."
+TotalTimeMatchedIdle = 9
+HasMPI = true
+TotalClaimRunTime = 7225
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c070"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 14
+Name = "slot2@glow-c070.cs.wisc.edu"
+ImageSize = 109656
+NumPids = 1
+MonitorSelfTime = 1358391678
+TimeToLive = 2147483647
+KeyboardIdle = 290322
+LastBenchmark = 1358373471
+TotalDisk = 72254488
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358373471
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1866
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.096063
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 8633
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 280335
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391831
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 117
+TotalMemory = 3862
+DaemonStartTime = 1358102709
+EnteredCurrentActivity = 1358389635
+MyAddress = "<128.105.245.70:39810>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223709.73#1358387884"
+HasJava = true
+EnteredCurrentState = 1358384603
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391833
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 9
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1263
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00800000000000000008000000000000"
+KFlops = 1057312
+UpdatesSequenced = 1864
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3804
+Arch = "INTEL"
+Mips = 2558
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391831
+HasTDP = true
+ConsoleIdle = 290322
+SubnetMask = "255.255.254.0"
+UpdatesLost = 36
+TotalJobRunTime = 2198
+StartdIpAddr = "<128.105.245.70:39810>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.070000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.665710
+MonitorSelfAge = 288969
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389635
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127244
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c070.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b3:f4"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.070000
+
+Machine = "glow-c071.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391970
+UpdateSequenceNumber = 1407
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223724.31"
+PublicClaimId = "<128.105.245.71:46103>#1358104853#131#..."
+TotalTimeMatchedIdle = 11
+HasMPI = true
+TotalClaimRunTime = 7370
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c071"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 6
+Name = "slot1@glow-c071.cs.wisc.edu"
+ImageSize = 107580
+NumPids = 1
+MonitorSelfTime = 1358391902
+TimeToLive = 2147483647
+KeyboardIdle = 288315
+LastBenchmark = 1358371066
+TotalDisk = 72255560
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371066
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1408
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095844
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90047
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 196716
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 244
+TotalMemory = 3862
+DaemonStartTime = 1358104853
+EnteredCurrentActivity = 1358391667
+MyAddress = "<128.105.245.71:46103>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223724.31#1358389446"
+HasJava = true
+EnteredCurrentState = 1358384599
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391970
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 84
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1266
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x054a8000000aaa024421000200041000"
+KFlops = 1097557
+UpdatesSequenced = 1407
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2459
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 288315
+SubnetMask = "255.255.254.0"
+UpdatesLost = 209
+TotalJobRunTime = 303
+StartdIpAddr = "<128.105.245.71:46103>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.200000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 554.881592
+MonitorSelfAge = 287050
+LoadAvg = 1.100000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358391667
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127780
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c071.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:0e"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.100000
+TotalCondorLoadAvg = 2.200000
+
+Machine = "glow-c071.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391971
+UpdateSequenceNumber = 1387
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223718.5"
+PublicClaimId = "<128.105.245.71:46103>#1358104853#132#..."
+TotalTimeMatchedIdle = 10
+HasMPI = true
+TotalClaimRunTime = 7360
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c071"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 13
+Name = "slot2@glow-c071.cs.wisc.edu"
+ImageSize = 108748
+NumPids = 1
+MonitorSelfTime = 1358391902
+TimeToLive = 2147483647
+KeyboardIdle = 288315
+LastBenchmark = 1358371066
+TotalDisk = 72255560
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371066
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1388
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095844
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90517
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 196277
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 256
+TotalMemory = 3862
+DaemonStartTime = 1358104853
+EnteredCurrentActivity = 1358390729
+MyAddress = "<128.105.245.71:46103>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223718.5#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391971
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 36
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1266
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000000000000000000000"
+KFlops = 1097557
+UpdatesSequenced = 1387
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2459
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 288315
+SubnetMask = "255.255.254.0"
+UpdatesLost = 20
+TotalJobRunTime = 1242
+StartdIpAddr = "<128.105.245.71:46103>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.200000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 554.881592
+MonitorSelfAge = 287050
+LoadAvg = 1.100000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390729
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127780
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c071.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:0e"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.100000
+TotalCondorLoadAvg = 2.200000
+
+Machine = "glow-c072.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391927
+UpdateSequenceNumber = 1367
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1220853.129"
+PublicClaimId = "<128.105.245.72:60862>#1358100963#125#..."
+TotalTimeMatchedIdle = 12
+HasMPI = true
+TotalClaimRunTime = 13278
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c072"
+JavaVendor = "Sun Microsystems Inc."
+Name = "slot1@glow-c072.cs.wisc.edu"
+ImageSize = 15496
+NumPids = 1
+MonitorSelfTime = 1358391855
+TimeToLive = 2147483647
+KeyboardIdle = 292162
+LastBenchmark = 1358371142
+TotalDisk = 75669884
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371142
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1366
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083278
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90240
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 200411
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 214
+TotalMemory = 2026
+DaemonStartTime = 1358100963
+EnteredCurrentActivity = 1358378649
+MyAddress = "<128.105.245.72:60862>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1220853.129#1358328602"
+HasJava = true
+EnteredCurrentState = 1358378649
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391927
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 77
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x555000222aa900000000000000040100"
+KFlops = 1101446
+UpdatesSequenced = 1367
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3796
+Arch = "INTEL"
+Mips = 2645
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 292162
+SubnetMask = "255.255.254.0"
+UpdatesLost = 196
+TotalJobRunTime = 13278
+StartdIpAddr = "<128.105.245.72:60862>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.304688
+MonitorSelfAge = 290893
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358378649
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37834942
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c072.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b2:c8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c072.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391928
+UpdateSequenceNumber = 1383
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223718.53"
+PublicClaimId = "<128.105.245.72:60862>#1358100963#132#..."
+TotalTimeMatchedIdle = 15
+HasMPI = true
+TotalClaimRunTime = 7327
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c072"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 9
+Name = "slot2@glow-c072.cs.wisc.edu"
+ImageSize = 106828
+NumPids = 1
+MonitorSelfTime = 1358391855
+TimeToLive = 2147483647
+KeyboardIdle = 292162
+LastBenchmark = 1358371142
+TotalDisk = 75669884
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371142
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1383
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083278
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 86267
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 204424
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 186
+TotalMemory = 2026
+DaemonStartTime = 1358100963
+EnteredCurrentActivity = 1358390852
+MyAddress = "<128.105.245.72:60862>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223718.53#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384598
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391928
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 54
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000010000000000000000000000"
+KFlops = 1101446
+UpdatesSequenced = 1383
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3796
+Arch = "INTEL"
+Mips = 2645
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 292162
+SubnetMask = "255.255.254.0"
+UpdatesLost = 27
+TotalJobRunTime = 1076
+StartdIpAddr = "<128.105.245.72:60862>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.304688
+MonitorSelfAge = 290893
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390852
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37834942
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c072.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b2:c8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c073.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391727
+UpdateSequenceNumber = 1372
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223720.33"
+PublicClaimId = "<128.105.245.73:34400>#1358101136#176#..."
+TotalTimeMatchedIdle = 8
+HasMPI = true
+TotalClaimRunTime = 7120
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c073"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 15
+Name = "slot1@glow-c073.cs.wisc.edu"
+ImageSize = 106824
+NumPids = 1
+MonitorSelfTime = 1358391542
+TimeToLive = 2147483647
+KeyboardIdle = 291787
+LastBenchmark = 1358374310
+TotalDisk = 72254724
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374310
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1372
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091721
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 11603
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 278715
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391498
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 219
+TotalMemory = 3862
+DaemonStartTime = 1358101136
+EnteredCurrentActivity = 1358391012
+MyAddress = "<128.105.245.73:34400>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223720.33#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384601
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391727
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 25
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00011501100000000090000000000000"
+KFlops = 1097335
+UpdatesSequenced = 1372
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3800
+Arch = "INTEL"
+Mips = 2537
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391498
+HasTDP = true
+ConsoleIdle = 291787
+SubnetMask = "255.255.254.0"
+UpdatesLost = 211
+TotalJobRunTime = 715
+StartdIpAddr = "<128.105.245.73:34400>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 610.132385
+MonitorSelfAge = 290407
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358391012
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127362
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c073.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:a8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c073.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391728
+UpdateSequenceNumber = 1354
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223712.3"
+PublicClaimId = "<128.105.245.73:34400>#1358101136#175#..."
+TotalTimeMatchedIdle = 13
+HasMPI = true
+TotalClaimRunTime = 7114
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c073"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 15
+Name = "slot2@glow-c073.cs.wisc.edu"
+ImageSize = 111320
+NumPids = 1
+MonitorSelfTime = 1358391542
+TimeToLive = 2147483647
+KeyboardIdle = 291787
+LastBenchmark = 1358374310
+TotalDisk = 72254724
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374310
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1355
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091721
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 11820
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 278537
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391482
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 184
+TotalMemory = 3862
+DaemonStartTime = 1358101136
+EnteredCurrentActivity = 1358390272
+MyAddress = "<128.105.245.73:34400>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223712.3#1358387885"
+HasJava = true
+EnteredCurrentState = 1358384606
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391728
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 17
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00400000000000000000000000000000"
+KFlops = 1097335
+UpdatesSequenced = 1354
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3800
+Arch = "INTEL"
+Mips = 2537
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391483
+HasTDP = true
+ConsoleIdle = 291787
+SubnetMask = "255.255.254.0"
+UpdatesLost = 22
+TotalJobRunTime = 1456
+StartdIpAddr = "<128.105.245.73:34400>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 610.132385
+MonitorSelfAge = 290407
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390272
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127362
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c073.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:a8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c075.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391927
+UpdateSequenceNumber = 1363
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223705.69"
+PublicClaimId = "<128.105.245.75:51396>#1358102719#145#..."
+TotalTimeMatchedIdle = 12
+HasMPI = true
+TotalClaimRunTime = 7326
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c075"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 6
+Name = "slot1@glow-c075.cs.wisc.edu"
+ImageSize = 109000
+NumPids = 1
+MonitorSelfTime = 1358391689
+TimeToLive = 2147483647
+KeyboardIdle = 290407
+LastBenchmark = 1358371078
+TotalDisk = 72255552
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371078
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1367
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.079210
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 91955
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 196998
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 179
+TotalMemory = 3862
+DaemonStartTime = 1358102719
+EnteredCurrentActivity = 1358389198
+MyAddress = "<128.105.245.75:51396>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223705.69#1358387883"
+HasJava = true
+EnteredCurrentState = 1358384599
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391927
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 48
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000a01112aa8000000480000000001"
+KFlops = 1062936
+UpdatesSequenced = 1369
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2561
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290407
+SubnetMask = "255.255.254.0"
+UpdatesLost = 200
+TotalJobRunTime = 2729
+StartdIpAddr = "<128.105.245.75:51396>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.170000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 612.031982
+MonitorSelfAge = 288972
+LoadAvg = 1.080000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389198
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127776
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c075.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:80"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.080000
+TotalCondorLoadAvg = 2.170000
+
+Machine = "glow-c075.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391928
+UpdateSequenceNumber = 1387
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223709.18"
+PublicClaimId = "<128.105.245.75:51396>#1358102719#146#..."
+TotalTimeMatchedIdle = 25
+HasMPI = true
+TotalClaimRunTime = 7326
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c075"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 11
+Name = "slot2@glow-c075.cs.wisc.edu"
+ImageSize = 109248
+NumPids = 1
+MonitorSelfTime = 1358391689
+TimeToLive = 2147483647
+KeyboardIdle = 290407
+LastBenchmark = 1358371078
+TotalDisk = 72255552
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371078
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1393
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.079210
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 92931
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 195995
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 184
+TotalMemory = 3862
+DaemonStartTime = 1358102719
+EnteredCurrentActivity = 1358389476
+MyAddress = "<128.105.245.75:51396>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223709.18#1358387884"
+HasJava = true
+EnteredCurrentState = 1358384599
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391928
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 53
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000000010000000000000"
+KFlops = 1062936
+UpdatesSequenced = 1393
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2561
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290407
+SubnetMask = "255.255.254.0"
+UpdatesLost = 35
+TotalJobRunTime = 2452
+StartdIpAddr = "<128.105.245.75:51396>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.170000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 612.031982
+MonitorSelfAge = 288972
+LoadAvg = 1.090000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389476
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127776
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c075.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:80"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.080000
+TotalCondorLoadAvg = 2.170000
+
+Machine = "glow-c076.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391931
+UpdateSequenceNumber = 1317
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223711.21"
+PublicClaimId = "<128.105.245.76:37505>#1358101365#147#..."
+TotalTimeMatchedIdle = 16
+HasMPI = true
+TotalClaimRunTime = 7327
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c076"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 5
+Name = "slot1@glow-c076.cs.wisc.edu"
+ImageSize = 108868
+NumPids = 1
+MonitorSelfTime = 1358391773
+TimeToLive = 2147483647
+KeyboardIdle = 291766
+LastBenchmark = 1358371055
+TotalDisk = 75666396
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371055
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1314
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.087485
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 82680
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 207632
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 187
+TotalMemory = 2026
+DaemonStartTime = 1358101365
+EnteredCurrentActivity = 1358389818
+MyAddress = "<128.105.245.76:37505>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223711.21#1358387884"
+HasJava = true
+EnteredCurrentState = 1358384598
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391931
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 38
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x0055500800002a082000000000000000"
+KFlops = 1079097
+UpdatesSequenced = 1313
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3836
+Arch = "INTEL"
+Mips = 2632
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 291766
+SubnetMask = "255.255.254.0"
+UpdatesLost = 197
+TotalJobRunTime = 2113
+StartdIpAddr = "<128.105.245.76:37505>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.050000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 613.514221
+MonitorSelfAge = 290409
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389818
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37833198
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c076.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:cb:ea"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.020000
+TotalCondorLoadAvg = 2.050000
+
+Machine = "glow-c076.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391932
+UpdateSequenceNumber = 1349
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223707.26"
+PublicClaimId = "<128.105.245.76:37505>#1358101365#148#..."
+TotalTimeMatchedIdle = 14
+HasMPI = true
+TotalClaimRunTime = 7323
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c076"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 11
+Name = "slot2@glow-c076.cs.wisc.edu"
+ImageSize = 108568
+NumPids = 1
+MonitorSelfTime = 1358391773
+TimeToLive = 2147483647
+KeyboardIdle = 291766
+LastBenchmark = 1358371055
+TotalDisk = 75666396
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371055
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1350
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.087485
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 89486
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 200766
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 216
+TotalMemory = 2026
+DaemonStartTime = 1358101365
+EnteredCurrentActivity = 1358389166
+MyAddress = "<128.105.245.76:37505>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223707.26#1358387883"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391932
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 67
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000200000000100040400000000"
+KFlops = 1079097
+UpdatesSequenced = 1349
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3836
+Arch = "INTEL"
+Mips = 2632
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 291766
+SubnetMask = "255.255.254.0"
+UpdatesLost = 39
+TotalJobRunTime = 2766
+StartdIpAddr = "<128.105.245.76:37505>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.050000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 613.514221
+MonitorSelfAge = 290409
+LoadAvg = 1.020000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389166
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37833198
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c076.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:cb:ea"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.020000
+TotalCondorLoadAvg = 2.050000
+
+Machine = "glow-c079.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391780
+UpdateSequenceNumber = 1349
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223715.87"
+PublicClaimId = "<128.105.245.79:56567>#1358102103#131#..."
+TotalTimeMatchedIdle = 13
+HasMPI = true
+TotalClaimRunTime = 7170
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c079"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 12
+Name = "slot1@glow-c079.cs.wisc.edu"
+ImageSize = 109544
+NumPids = 1
+MonitorSelfTime = 1358391553
+TimeToLive = 2147483647
+KeyboardIdle = 290873
+LastBenchmark = 1358376897
+TotalDisk = 72253784
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358376897
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1350
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083358
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90020
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 198250
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 1315
+TotalMemory = 3862
+DaemonStartTime = 1358102103
+EnteredCurrentActivity = 1358390408
+MyAddress = "<128.105.245.79:56567>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223715.87#1358389443"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391780
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 57
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x09550000000150000000004300000020"
+KFlops = 1077862
+UpdatesSequenced = 1351
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4672
+Arch = "INTEL"
+Mips = 2541
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290873
+SubnetMask = "255.255.254.0"
+UpdatesLost = 205
+TotalJobRunTime = 1372
+StartdIpAddr = "<128.105.245.79:56567>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.010000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 608.318970
+MonitorSelfAge = 289451
+LoadAvg = 1.000000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390408
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36126892
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c079.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:70"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.000000
+TotalCondorLoadAvg = 2.010000
+
+Machine = "glow-c079.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391781
+UpdateSequenceNumber = 1370
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223706.79"
+PublicClaimId = "<128.105.245.79:56567>#1358102103#132#..."
+TotalTimeMatchedIdle = 15
+HasMPI = true
+TotalClaimRunTime = 7170
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c079"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 15
+Name = "slot2@glow-c079.cs.wisc.edu"
+ImageSize = 110448
+NumPids = 1
+MonitorSelfTime = 1358391553
+TimeToLive = 2147483647
+KeyboardIdle = 290873
+LastBenchmark = 1358376897
+TotalDisk = 72253784
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358376897
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1373
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083358
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 89020
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 200298
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 244
+TotalMemory = 3862
+DaemonStartTime = 1358102103
+EnteredCurrentActivity = 1358389094
+MyAddress = "<128.105.245.79:56567>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223706.79#1358387883"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391781
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 76
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000010000200000000000"
+KFlops = 1077862
+UpdatesSequenced = 1372
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4672
+Arch = "INTEL"
+Mips = 2541
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290873
+SubnetMask = "255.255.254.0"
+UpdatesLost = 27
+TotalJobRunTime = 2687
+StartdIpAddr = "<128.105.245.79:56567>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.010000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 608.318970
+MonitorSelfAge = 289451
+LoadAvg = 1.010000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389094
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36126892
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c079.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:70"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.000000
+TotalCondorLoadAvg = 2.010000
+
+Machine = "glow-c080.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391424
+UpdateSequenceNumber = 1393
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223719.58"
+PublicClaimId = "<128.105.245.80:56596>#1358102116#144#..."
+TotalTimeMatchedIdle = 11
+HasMPI = true
+TotalClaimRunTime = 6817
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c080"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 8
+Name = "slot1@glow-c080.cs.wisc.edu"
+ImageSize = 108004
+NumPids = 1
+MonitorSelfTime = 1358391322
+TimeToLive = 2147483647
+KeyboardIdle = 290505
+LastBenchmark = 1358376372
+TotalDisk = 75666200
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358376372
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1394
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.087520
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 80472
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 208561
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 209
+TotalMemory = 2026
+DaemonStartTime = 1358102116
+EnteredCurrentActivity = 1358390907
+MyAddress = "<128.105.245.80:56596>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223719.58#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384601
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391424
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 41
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1257
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x4aa800000000a0000000900120000000"
+KFlops = 1067617
+UpdatesSequenced = 1393
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3792
+Arch = "INTEL"
+Mips = 2356
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290505
+SubnetMask = "255.255.254.0"
+UpdatesLost = 209
+TotalJobRunTime = 517
+StartdIpAddr = "<128.105.245.80:56596>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.150000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 551.267944
+MonitorSelfAge = 289207
+LoadAvg = 1.070000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390907
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37833100
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c080.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b3:b4"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.070000
+TotalCondorLoadAvg = 2.150000
+
+Machine = "glow-c080.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391719
+UpdateSequenceNumber = 1370
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =

<TRUNCATED>
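
The machine ads above all carry the same CpuBusy expression, ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 ), and its value can be checked by hand against the literal attributes of any one ad. For slot1@glow-c076.cs.wisc.edu, LoadAvg = 1.030000 and CondorLoadAvg = 1.020000, so the difference is 0.01, well below the 0.5 threshold; that agrees with the CpuIsBusy = false attribute reported in the same ad. A minimal sketch of that arithmetic in plain Java (not the ClassAd library; the class and variable names are invented for illustration):

    // Illustrative only: re-computes the CpuBusy expression from the machine ads
    // above with plain Java arithmetic; it does not use the ClassAd library.
    public class CpuBusyCheck {
        public static void main(String[] args) {
            double loadAvg = 1.03;        // LoadAvg from slot1@glow-c076.cs.wisc.edu
            double condorLoadAvg = 1.02;  // CondorLoadAvg from the same ad
            // CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
            boolean cpuBusy = (loadAvg - condorLoadAvg) >= 0.5;
            System.out.println("CpuBusy = " + cpuBusy); // prints "CpuBusy = false"
        }
    }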


[23/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java
deleted file mode 100644
index a1c6fb9..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedTrackingManager.java
+++ /dev/null
@@ -1,188 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.util.Arrays;
-import java.util.BitSet;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.external.feed.api.IFeedTrackingManager;
-import org.apache.asterix.external.feed.management.FeedConnectionId;
-import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
-import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
-import org.apache.asterix.file.FeedOperations;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedTrackingManager implements IFeedTrackingManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedTrackingManager.class.getName());
-
-    private final BitSet allOnes;
-
-    private Map<FeedConnectionId, Map<AckId, BitSet>> ackHistory;
-    private Map<FeedConnectionId, Map<AckId, Integer>> maxBaseAcked;
-
-    public FeedTrackingManager() {
-        byte[] allOneBytes = new byte[128];
-        Arrays.fill(allOneBytes, (byte) 0xff);
-        allOnes = BitSet.valueOf(allOneBytes);
-        ackHistory = new HashMap<FeedConnectionId, Map<AckId, BitSet>>();
-        maxBaseAcked = new HashMap<FeedConnectionId, Map<AckId, Integer>>();
-    }
-
-    @Override
-    public synchronized void submitAckReport(FeedTupleCommitAckMessage ackMessage) {
-        AckId ackId = getAckId(ackMessage);
-        Map<AckId, BitSet> acksForConnection = ackHistory.get(ackMessage.getConnectionId());
-        if (acksForConnection == null) {
-            acksForConnection = new HashMap<AckId, BitSet>();
-            acksForConnection.put(ackId, BitSet.valueOf(ackMessage.getCommitAcks()));
-            ackHistory.put(ackMessage.getConnectionId(), acksForConnection);
-        }
-        BitSet currentAcks = acksForConnection.get(ackId);
-        if (currentAcks == null) {
-            currentAcks = BitSet.valueOf(ackMessage.getCommitAcks());
-            acksForConnection.put(ackId, currentAcks);
-        } else {
-            currentAcks.or(BitSet.valueOf(ackMessage.getCommitAcks()));
-        }
-        if (Arrays.equals(currentAcks.toByteArray(), allOnes.toByteArray())) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(ackMessage.getIntakePartition() + " (" + ackMessage.getBase() + ")" + " is covered");
-            }
-            Map<AckId, Integer> maxBaseAckedForConnection = maxBaseAcked.get(ackMessage.getConnectionId());
-            if (maxBaseAckedForConnection == null) {
-                maxBaseAckedForConnection = new HashMap<AckId, Integer>();
-                maxBaseAcked.put(ackMessage.getConnectionId(), maxBaseAckedForConnection);
-            }
-            Integer maxBaseAckedValue = maxBaseAckedForConnection.get(ackId);
-            if (maxBaseAckedValue == null) {
-                maxBaseAckedValue = ackMessage.getBase();
-                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
-                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
-                        ackMessage.getBase());
-            } else if (ackMessage.getBase() == maxBaseAckedValue + 1) {
-                maxBaseAckedForConnection.put(ackId, ackMessage.getBase());
-                sendCommitResponseMessage(ackMessage.getConnectionId(), ackMessage.getIntakePartition(),
-                        ackMessage.getBase());
-            } else {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Ignoring discontinuous acked base " + ackMessage.getBase() + " for " + ackId);
-                }
-            }
-
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("AckId " + ackId + " pending number of acks " + (128 * 8 - currentAcks.cardinality()));
-            }
-        }
-    }
-
-    public synchronized void disableTracking(FeedConnectionId connectionId) {
-        ackHistory.remove(connectionId);
-        maxBaseAcked.remove(connectionId);
-    }
-
-    private void sendCommitResponseMessage(FeedConnectionId connectionId, int partition, int base) {
-        FeedTupleCommitResponseMessage response = new FeedTupleCommitResponseMessage(connectionId, partition, base);
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-        String collectLocation = collectLocations.get(partition);
-        Set<String> messageDestinations = new HashSet<String>();
-        messageDestinations.add(collectLocation);
-        messageDestinations.addAll(storageLocations);
-        try {
-            JobSpecification spec = FeedOperations.buildCommitAckResponseJob(response, messageDestinations);
-            CentralFeedManager.runJob(spec, false);
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to send commit response message " + response + " exception " + e.getMessage());
-            }
-        }
-    }
-
-    private static AckId getAckId(FeedTupleCommitAckMessage ackMessage) {
-        return new AckId(ackMessage.getConnectionId(), ackMessage.getIntakePartition(), ackMessage.getBase());
-    }
-
-    private static class AckId {
-        private FeedConnectionId connectionId;
-        private int intakePartition;
-        private int base;
-
-        public AckId(FeedConnectionId connectionId, int intakePartition, int base) {
-            this.connectionId = connectionId;
-            this.intakePartition = intakePartition;
-            this.base = base;
-        }
-
-        @Override
-        public boolean equals(Object o) {
-            if (this == o) {
-                return true;
-            }
-            if (!(o instanceof AckId)) {
-                return false;
-            }
-            AckId other = (AckId) o;
-            return other.getConnectionId().equals(connectionId) && other.getIntakePartition() == intakePartition
-                    && other.getBase() == base;
-        }
-
-        @Override
-        public String toString() {
-            return connectionId + "[" + intakePartition + "]" + "(" + base + ")";
-        }
-
-        @Override
-        public int hashCode() {
-            return toString().hashCode();
-        }
-
-        public FeedConnectionId getConnectionId() {
-            return connectionId;
-        }
-
-        public int getIntakePartition() {
-            return intakePartition;
-        }
-
-        public int getBase() {
-            return base;
-        }
-
-    }
-
-    @Override
-    public void disableAcking(FeedConnectionId connectionId) {
-        ackHistory.remove(connectionId);
-        maxBaseAcked.remove(connectionId);
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Acking disabled for " + connectionId);
-        }
-    }
-
-}
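
For reference, the heart of the FeedTrackingManager removed above is its BitSet bookkeeping: each partial acknowledgement bitmap reported by an intake partition is OR-ed into a single BitSet kept per (connection, partition, base), and the base counts as fully acknowledged once that BitSet matches a 128-byte all-ones mask. A self-contained sketch of just that coverage test, using only java.util.BitSet (the class and method names below are invented for illustration):

    import java.util.Arrays;
    import java.util.BitSet;

    // Illustrative sketch of the ack-coverage idea in the removed FeedTrackingManager:
    // OR partial ack bitmaps together and compare the result against an all-ones mask
    // to decide whether a base has been fully acknowledged.
    public class AckCoverageSketch {
        private static final int ACK_BYTES = 128;   // same width as the removed code
        private final BitSet allOnes;
        private final BitSet acked = new BitSet();

        public AckCoverageSketch() {
            byte[] ones = new byte[ACK_BYTES];
            Arrays.fill(ones, (byte) 0xff);
            allOnes = BitSet.valueOf(ones);
        }

        // Merge one partial ack bitmap (e.g. the payload of a commit-ack message).
        public void merge(byte[] partialAcks) {
            acked.or(BitSet.valueOf(partialAcks));
        }

        // True once every one of the 128 * 8 tuple slots has been acknowledged.
        public boolean fullyAcked() {
            return Arrays.equals(acked.toByteArray(), allOnes.toByteArray());
        }

        // Number of slots still waiting for an acknowledgement.
        public int pendingAcks() {
            return ACK_BYTES * 8 - acked.cardinality();
        }
    }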

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java
deleted file mode 100644
index 9d746c8..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkCollection.java
+++ /dev/null
@@ -1,206 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.api.common.SessionConfig;
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.external.feed.api.IFeedWork;
-import org.apache.asterix.external.feed.api.IFeedWorkEventListener;
-import org.apache.asterix.external.feed.management.FeedCollectInfo;
-import org.apache.asterix.external.feed.management.FeedConnectionRequest;
-import org.apache.asterix.external.feed.management.FeedConnectionRequest.ConnectionStatus;
-import org.apache.asterix.lang.aql.statement.SubscribeFeedStatement;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.statement.DataverseDecl;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.api.job.JobId;
-
-/**
- * A collection of feed management related task, each represented as an implementation of {@code IFeedWork}.
- */
-public class FeedWorkCollection {
-
-    private static Logger LOGGER = Logger.getLogger(FeedWorkCollection.class.getName());
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    /**
-     * The task of subscribing to a feed to obtain data.
-     */
-    public static class SubscribeFeedWork implements IFeedWork {
-
-        private final Runnable runnable;
-
-        private final FeedConnectionRequest request;
-
-        @Override
-        public Runnable getRunnable() {
-            return runnable;
-        }
-
-        public SubscribeFeedWork(String[] locations, FeedConnectionRequest request) {
-            this.runnable = new SubscribeFeedWorkRunnable(locations, request);
-            this.request = request;
-        }
-
-        private static class SubscribeFeedWorkRunnable implements Runnable {
-
-            private final FeedConnectionRequest request;
-            private final String[] locations;
-
-            public SubscribeFeedWorkRunnable(String[] locations, FeedConnectionRequest request) {
-                this.request = request;
-                this.locations = locations;
-            }
-
-            @Override
-            public void run() {
-                try {
-                    PrintWriter writer = new PrintWriter(System.out, true);
-                    SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-                    DataverseDecl dataverseDecl = new DataverseDecl(
-                            new Identifier(request.getReceivingFeedId().getDataverse()));
-                    SubscribeFeedStatement subscribeStmt = new SubscribeFeedStatement(locations, request);
-                    List<Statement> statements = new ArrayList<Statement>();
-                    statements.add(dataverseDecl);
-                    statements.add(subscribeStmt);
-                    QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-                    translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                            QueryTranslator.ResultDelivery.SYNC);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Submitted connection requests for execution: " + request);
-                    }
-                } catch (Exception e) {
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe("Exception in executing " + request);
-                    }
-                    throw new RuntimeException(e);
-                }
-            }
-        }
-
-        public static class FeedSubscribeWorkEventListener implements IFeedWorkEventListener {
-
-            @Override
-            public void workFailed(IFeedWork work, Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request
-                            + " failed with exception " + e);
-                }
-            }
-
-            @Override
-            public void workCompleted(IFeedWork work) {
-                ((SubscribeFeedWork) work).request.setSubscriptionStatus(ConnectionStatus.ACTIVE);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.warning(" Feed subscription request " + ((SubscribeFeedWork) work).request + " completed ");
-                }
-            }
-
-        }
-
-        public FeedConnectionRequest getRequest() {
-            return request;
-        }
-
-        @Override
-        public String toString() {
-            return "SubscribeFeedWork for [" + request + "]";
-        }
-
-    }
-
-    /**
-     * The task of activating a set of feeds.
-     */
-    public static class ActivateFeedWork implements IFeedWork {
-
-        private final Runnable runnable;
-
-        @Override
-        public Runnable getRunnable() {
-            return runnable;
-        }
-
-        public ActivateFeedWork(List<FeedCollectInfo> feedsToRevive) {
-            this.runnable = new FeedsActivateRunnable(feedsToRevive);
-        }
-
-        public ActivateFeedWork() {
-            this.runnable = new FeedsActivateRunnable();
-        }
-
-        private static class FeedsActivateRunnable implements Runnable {
-
-            private List<FeedCollectInfo> feedsToRevive;
-            private Mode mode;
-
-            public enum Mode {
-                REVIVAL_POST_NODE_REJOIN
-            }
-
-            public FeedsActivateRunnable(List<FeedCollectInfo> feedsToRevive) {
-                this.feedsToRevive = feedsToRevive;
-            }
-
-            public FeedsActivateRunnable() {
-            }
-
-            @Override
-            public void run() {
-                switch (mode) {
-                    case REVIVAL_POST_NODE_REJOIN:
-                        try {
-                            Thread.sleep(10000);
-                        } catch (InterruptedException e1) {
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Attempt to resume feed interrupted");
-                            }
-                            throw new IllegalStateException(e1.getMessage());
-                        }
-                        for (FeedCollectInfo finfo : feedsToRevive) {
-                            try {
-                                JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
-                                if (LOGGER.isLoggable(Level.INFO)) {
-                                    LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
-                                    LOGGER.info("Job:" + finfo.jobSpec);
-                                }
-                            } catch (Exception e) {
-                                if (LOGGER.isLoggable(Level.WARNING)) {
-                                    LOGGER.warning(
-                                            "Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
-                                }
-                            }
-                        }
-                }
-            }
-
-        }
-
-    }
-}
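
The FeedWorkCollection removed above follows one simple pattern throughout: each unit of feed-management work exposes a Runnable, and a listener is told whether the work completed or failed. A stripped-down, hypothetical sketch of that pattern (none of these types are the real AsterixDB interfaces):

    // Hypothetical sketch of the "work item + event listener" pattern used by the
    // removed FeedWorkCollection. None of these types are the real AsterixDB API.
    public class WorkPatternSketch {

        interface Work {
            Runnable getRunnable();
        }

        interface WorkEventListener {
            void workCompleted(Work work);
            void workFailed(Work work, Exception e);
        }

        // Runs a work item and reports the outcome to the listener.
        static void execute(Work work, WorkEventListener listener) {
            try {
                work.getRunnable().run();
                listener.workCompleted(work);
            } catch (Exception e) {
                listener.workFailed(work, e);
            }
        }

        public static void main(String[] args) {
            Work work = () -> () -> System.out.println("subscribing to feed...");
            execute(work, new WorkEventListener() {
                public void workCompleted(Work w) { System.out.println("completed"); }
                public void workFailed(Work w, Exception e) { System.out.println("failed: " + e); }
            });
        }
    }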

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java
deleted file mode 100644
index b30d8a7..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedWorkRequestResponseHandler.java
+++ /dev/null
@@ -1,269 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.api.IClusterManagementWork;
-import org.apache.asterix.common.api.IClusterManagementWorkResponse;
-import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
-import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
-import org.apache.asterix.external.feed.watch.FeedJobInfo;
-import org.apache.asterix.metadata.cluster.AddNodeWork;
-import org.apache.asterix.metadata.cluster.AddNodeWorkResponse;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.asterix.om.util.AsterixClusterProperties;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.constraints.Constraint;
-import org.apache.hyracks.api.constraints.PartitionConstraintHelper;
-import org.apache.hyracks.api.constraints.expressions.ConstantExpression;
-import org.apache.hyracks.api.constraints.expressions.ConstraintExpression;
-import org.apache.hyracks.api.constraints.expressions.ConstraintExpression.ExpressionTag;
-import org.apache.hyracks.api.constraints.expressions.LValueConstraintExpression;
-import org.apache.hyracks.api.constraints.expressions.PartitionCountExpression;
-import org.apache.hyracks.api.constraints.expressions.PartitionLocationExpression;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedWorkRequestResponseHandler implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedWorkRequestResponseHandler.class.getName());
-
-    private final LinkedBlockingQueue<IClusterManagementWorkResponse> inbox;
-
-    private Map<Integer, Map<String, List<FeedJobInfo>>> feedsWaitingForResponse = new HashMap<Integer, Map<String, List<FeedJobInfo>>>();
-
-    public FeedWorkRequestResponseHandler(LinkedBlockingQueue<IClusterManagementWorkResponse> inbox) {
-        this.inbox = inbox;
-    }
-
-    @Override
-    public void run() {
-        while (true) {
-            IClusterManagementWorkResponse response = null;
-            try {
-                response = inbox.take();
-            } catch (InterruptedException e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Interrupted exception " + e.getMessage());
-                }
-            }
-            IClusterManagementWork submittedWork = response.getWork();
-            Map<String, String> nodeSubstitution = new HashMap<String, String>();
-            switch (submittedWork.getClusterManagementWorkType()) {
-                case ADD_NODE:
-                    AddNodeWork addNodeWork = (AddNodeWork) submittedWork;
-                    int workId = addNodeWork.getWorkId();
-                    Map<String, List<FeedJobInfo>> failureAnalysis = feedsWaitingForResponse.get(workId);
-                    AddNodeWorkResponse resp = (AddNodeWorkResponse) response;
-                    List<String> nodesAdded = resp.getNodesAdded();
-                    List<String> unsubstitutedNodes = new ArrayList<String>();
-                    unsubstitutedNodes.addAll(addNodeWork.getDeadNodes());
-                    int nodeIndex = 0;
-
-                    /** form a mapping between the failed node and its substitute **/
-                    if (nodesAdded != null && nodesAdded.size() > 0) {
-                        for (String failedNodeId : addNodeWork.getDeadNodes()) {
-                            String substitute = nodesAdded.get(nodeIndex);
-                            nodeSubstitution.put(failedNodeId, substitute);
-                            nodeIndex = (nodeIndex + 1) % nodesAdded.size();
-                            unsubstitutedNodes.remove(failedNodeId);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Node " + substitute + " chosen to substitute lost node " + failedNodeId);
-                            }
-                        }
-                    }
-                    if (unsubstitutedNodes.size() > 0) {
-                        String[] participantNodes = AsterixClusterProperties.INSTANCE.getParticipantNodes()
-                                .toArray(new String[] {});
-                        nodeIndex = 0;
-                        for (String unsubstitutedNode : unsubstitutedNodes) {
-                            nodeSubstitution.put(unsubstitutedNode, participantNodes[nodeIndex]);
-                            if (LOGGER.isLoggable(Level.INFO)) {
-                                LOGGER.info("Node " + participantNodes[nodeIndex] + " chosen to substitute lost node "
-                                        + unsubstitutedNode);
-                            }
-                            nodeIndex = (nodeIndex + 1) % participantNodes.length;
-                        }
-
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Request " + resp.getWork() + " completed using internal nodes");
-                        }
-                    }
-
-                    // alter failed feed intake jobs
-
-                    for (Entry<String, List<FeedJobInfo>> entry : failureAnalysis.entrySet()) {
-                        String failedNode = entry.getKey();
-                        List<FeedJobInfo> impactedJobInfos = entry.getValue();
-                        for (FeedJobInfo info : impactedJobInfos) {
-                            JobSpecification spec = info.getSpec();
-                            replaceNode(spec, failedNode, nodeSubstitution.get(failedNode));
-                            info.setSpec(spec);
-                        }
-                    }
-
-                    Set<FeedIntakeInfo> revisedIntakeJobs = new HashSet<FeedIntakeInfo>();
-                    Set<FeedConnectJobInfo> revisedConnectJobInfos = new HashSet<FeedConnectJobInfo>();
-
-                    for (List<FeedJobInfo> infos : failureAnalysis.values()) {
-                        for (FeedJobInfo info : infos) {
-                            switch (info.getJobType()) {
-                                case INTAKE:
-                                    revisedIntakeJobs.add((FeedIntakeInfo) info);
-                                    break;
-                                case FEED_CONNECT:
-                                    revisedConnectJobInfos.add((FeedConnectJobInfo) info);
-                                    break;
-                            }
-                        }
-                    }
-
-                    IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-                    try {
-                        for (FeedIntakeInfo info : revisedIntakeJobs) {
-                            hcc.startJob(info.getSpec());
-                        }
-                        Thread.sleep(2000);
-                        for (FeedConnectJobInfo info : revisedConnectJobInfos) {
-                            hcc.startJob(info.getSpec());
-                            Thread.sleep(2000);
-                        }
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to start revised job post failure");
-                        }
-                    }
-
-                    break;
-                case REMOVE_NODE:
-                    throw new IllegalStateException("Invalid work submitted");
-            }
-        }
-    }
-
-    private void replaceNode(JobSpecification jobSpec, String failedNodeId, String replacementNode) {
-        Set<Constraint> userConstraints = jobSpec.getUserConstraints();
-        List<Constraint> locationConstraintsToReplace = new ArrayList<Constraint>();
-        List<Constraint> countConstraintsToReplace = new ArrayList<Constraint>();
-        List<OperatorDescriptorId> modifiedOperators = new ArrayList<OperatorDescriptorId>();
-        Map<OperatorDescriptorId, List<Constraint>> candidateConstraints = new HashMap<OperatorDescriptorId, List<Constraint>>();
-        Map<OperatorDescriptorId, Map<Integer, String>> newConstraints = new HashMap<OperatorDescriptorId, Map<Integer, String>>();
-        OperatorDescriptorId opId = null;
-        for (Constraint constraint : userConstraints) {
-            LValueConstraintExpression lexpr = constraint.getLValue();
-            ConstraintExpression cexpr = constraint.getRValue();
-            switch (lexpr.getTag()) {
-                case PARTITION_COUNT:
-                    opId = ((PartitionCountExpression) lexpr).getOperatorDescriptorId();
-                    if (modifiedOperators.contains(opId)) {
-                        countConstraintsToReplace.add(constraint);
-                    } else {
-                        List<Constraint> clist = candidateConstraints.get(opId);
-                        if (clist == null) {
-                            clist = new ArrayList<Constraint>();
-                            candidateConstraints.put(opId, clist);
-                        }
-                        clist.add(constraint);
-                    }
-                    break;
-                case PARTITION_LOCATION:
-                    opId = ((PartitionLocationExpression) lexpr).getOperatorDescriptorId();
-                    String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
-                    if (oldLocation.equals(failedNodeId)) {
-                        locationConstraintsToReplace.add(constraint);
-                        modifiedOperators.add(((PartitionLocationExpression) lexpr).getOperatorDescriptorId());
-                        Map<Integer, String> newLocs = newConstraints.get(opId);
-                        if (newLocs == null) {
-                            newLocs = new HashMap<Integer, String>();
-                            newConstraints.put(opId, newLocs);
-                        }
-                        int partition = ((PartitionLocationExpression) lexpr).getPartition();
-                        newLocs.put(partition, replacementNode);
-                    } else {
-                        if (modifiedOperators.contains(opId)) {
-                            locationConstraintsToReplace.add(constraint);
-                            Map<Integer, String> newLocs = newConstraints.get(opId);
-                            if (newLocs == null) {
-                                newLocs = new HashMap<Integer, String>();
-                                newConstraints.put(opId, newLocs);
-                            }
-                            int partition = ((PartitionLocationExpression) lexpr).getPartition();
-                            newLocs.put(partition, oldLocation);
-                        } else {
-                            List<Constraint> clist = candidateConstraints.get(opId);
-                            if (clist == null) {
-                                clist = new ArrayList<Constraint>();
-                                candidateConstraints.put(opId, clist);
-                            }
-                            clist.add(constraint);
-                        }
-                    }
-                    break;
-                default:
-                    break;
-            }
-        }
-
-        jobSpec.getUserConstraints().removeAll(locationConstraintsToReplace);
-        jobSpec.getUserConstraints().removeAll(countConstraintsToReplace);
-
-        for (OperatorDescriptorId mopId : modifiedOperators) {
-            List<Constraint> clist = candidateConstraints.get(mopId);
-            if (clist != null && !clist.isEmpty()) {
-                jobSpec.getUserConstraints().removeAll(clist);
-
-                for (Constraint c : clist) {
-                    if (c.getLValue().getTag().equals(ExpressionTag.PARTITION_LOCATION)) {
-                        ConstraintExpression cexpr = c.getRValue();
-                        int partition = ((PartitionLocationExpression) c.getLValue()).getPartition();
-                        String oldLocation = (String) ((ConstantExpression) cexpr).getValue();
-                        newConstraints.get(mopId).put(partition, oldLocation);
-                    }
-                }
-            }
-        }
-
-        for (Entry<OperatorDescriptorId, Map<Integer, String>> entry : newConstraints.entrySet()) {
-            OperatorDescriptorId nopId = entry.getKey();
-            Map<Integer, String> clist = entry.getValue();
-            IOperatorDescriptor op = jobSpec.getOperatorMap().get(nopId);
-            String[] locations = new String[clist.size()];
-            for (int i = 0; i < locations.length; i++) {
-                locations[i] = clist.get(i);
-            }
-            PartitionConstraintHelper.addAbsoluteLocationConstraint(jobSpec, op, locations);
-        }
-
-    }
-
-    public void registerFeedWork(int workId, Map<String, List<FeedJobInfo>> impactedJobs) {
-        feedsWaitingForResponse.put(workId, impactedJobs);
-    }
-}
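
The recovery logic in the FeedWorkRequestResponseHandler removed above reduces to two steps: build a substitution map from each dead node to a replacement (round-robin over the newly added nodes, falling back to nodes already in the cluster when none were added), then rewrite every location constraint that referenced a dead node. A small, self-contained sketch of the first step (method and variable names are invented for illustration, not the real API):

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch of the node-substitution step performed by the removed
    // FeedWorkRequestResponseHandler: each dead node is mapped, round-robin, to one
    // of the newly added nodes; any dead node left unmapped falls back to an
    // existing participant node. All names here are invented for illustration.
    public class NodeSubstitutionSketch {

        static Map<String, String> buildSubstitution(List<String> deadNodes,
                                                     List<String> addedNodes,
                                                     List<String> participantNodes) {
            Map<String, String> substitution = new HashMap<>();
            List<String> unsubstituted = new ArrayList<>(deadNodes);
            int i = 0;
            if (addedNodes != null && !addedNodes.isEmpty()) {
                for (String dead : deadNodes) {
                    substitution.put(dead, addedNodes.get(i));
                    i = (i + 1) % addedNodes.size();
                    unsubstituted.remove(dead);
                }
            }
            // Fall back to already-running participants for anything still unmapped.
            i = 0;
            for (String dead : unsubstituted) {
                substitution.put(dead, participantNodes.get(i));
                i = (i + 1) % participantNodes.size();
            }
            return substitution;
        }

        public static void main(String[] args) {
            Map<String, String> s = buildSubstitution(
                    Arrays.asList("nc3", "nc4"), Arrays.asList("nc5"), Arrays.asList("nc1", "nc2"));
            System.out.println(s); // both dead nodes map to the single added node, nc5
        }
    }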

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java
deleted file mode 100644
index dc02a53..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedsActivator.java
+++ /dev/null
@@ -1,118 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.api.common.SessionConfig;
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.external.feed.management.FeedCollectInfo;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.statement.ConnectFeedStatement;
-import org.apache.asterix.lang.common.statement.DataverseDecl;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.api.job.JobId;
-
-public class FeedsActivator implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    private List<FeedCollectInfo> feedsToRevive;
-    private Mode mode;
-
-    public enum Mode {
-        REVIVAL_POST_CLUSTER_REBOOT,
-        REVIVAL_POST_NODE_REJOIN
-    }
-
-    public FeedsActivator() {
-        this.mode = Mode.REVIVAL_POST_CLUSTER_REBOOT;
-    }
-
-    public FeedsActivator(List<FeedCollectInfo> feedsToRevive) {
-        this.feedsToRevive = feedsToRevive;
-        this.mode = Mode.REVIVAL_POST_NODE_REJOIN;
-    }
-
-    @Override
-    public void run() {
-        switch (mode) {
-            case REVIVAL_POST_CLUSTER_REBOOT:
-                //revivePostClusterReboot();
-                break;
-            case REVIVAL_POST_NODE_REJOIN:
-                try {
-                    Thread.sleep(10000);
-                } catch (InterruptedException e1) {
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Attempt to resume feed interrupted");
-                    }
-                    throw new IllegalStateException(e1.getMessage());
-                }
-                for (FeedCollectInfo finfo : feedsToRevive) {
-                    try {
-                        JobId jobId = AsterixAppContextInfo.getInstance().getHcc().startJob(finfo.jobSpec);
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Resumed feed :" + finfo.feedConnectionId + " job id " + jobId);
-                            LOGGER.info("Job:" + finfo.jobSpec);
-                        }
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.WARNING)) {
-                            LOGGER.warning("Unable to resume feed " + finfo.feedConnectionId + " " + e.getMessage());
-                        }
-                    }
-                }
-        }
-    }
-
-    public void reviveFeed(String dataverse, String feedName, String dataset, String feedPolicy) {
-        PrintWriter writer = new PrintWriter(System.out, true);
-        SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-        try {
-            DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(dataverse));
-            ConnectFeedStatement stmt = new ConnectFeedStatement(new Identifier(dataverse), new Identifier(feedName),
-                    new Identifier(dataset), feedPolicy, 0);
-            stmt.setForceConnect(true);
-            List<Statement> statements = new ArrayList<Statement>();
-            statements.add(dataverseDecl);
-            statements.add(stmt);
-            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                    QueryTranslator.ResultDelivery.SYNC);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Resumed feed: " + dataverse + ":" + dataset + " using policy " + feedPolicy);
-            }
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Exception in resuming loser feed: " + dataverse + ":" + dataset + " using policy "
-                        + feedPolicy + " Exception " + e.getMessage());
-            }
-        }
-    }
-}
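
FeedsActivator (deleted above) is a plain Runnable, so callers were expected to hand it to a thread or executor. A minimal usage sketch under that assumption: only FeedsActivator and FeedCollectInfo come from the code above; the single-thread executor and the wrapper class are illustrative, not part of the original code.

import java.util.List;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import org.apache.asterix.external.feed.management.FeedCollectInfo;
import org.apache.asterix.feed.FeedsActivator;

public class FeedsActivatorUsageSketch {
    // Illustrative only: schedules a post-node-rejoin revival run on a background thread.
    public static void reviveAfterNodeRejoin(List<FeedCollectInfo> feedsToRevive) {
        ExecutorService executor = Executors.newSingleThreadExecutor();
        // REVIVAL_POST_NODE_REJOIN mode: the activator sleeps briefly, then restarts each collect job.
        executor.submit(new FeedsActivator(feedsToRevive));
        executor.shutdown();
    }
}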

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
deleted file mode 100644
index 77c6a54..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/file/ExternalIndexingOperations.java
+++ /dev/null
@@ -1,760 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.file;
-
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.Date;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-
-import org.apache.asterix.common.api.ILocalResourceMetadata;
-import org.apache.asterix.common.config.AsterixStorageProperties;
-import org.apache.asterix.common.config.DatasetConfig.DatasetType;
-import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
-import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
-import org.apache.asterix.common.config.DatasetConfig.IndexType;
-import org.apache.asterix.common.config.IAsterixPropertiesProvider;
-import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
-import org.apache.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
-import org.apache.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
-import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.external.indexing.FilesIndexDescription;
-import org.apache.asterix.external.indexing.IndexingConstants;
-import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
-import org.apache.asterix.external.operators.ExternalDatasetIndexesAbortOperatorDescriptor;
-import org.apache.asterix.external.operators.ExternalDatasetIndexesCommitOperatorDescriptor;
-import org.apache.asterix.external.operators.ExternalDatasetIndexesRecoverOperatorDescriptor;
-import org.apache.asterix.external.operators.ExternalFilesIndexOperatorDescriptor;
-import org.apache.asterix.external.operators.IndexInfoOperatorDescriptor;
-import org.apache.asterix.external.provider.AdapterFactoryProvider;
-import org.apache.asterix.external.util.ExternalDataConstants;
-import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
-import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
-import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
-import org.apache.asterix.metadata.MetadataException;
-import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.metadata.declared.AqlMetadataProvider;
-import org.apache.asterix.metadata.entities.Dataset;
-import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
-import org.apache.asterix.metadata.entities.Index;
-import org.apache.asterix.metadata.utils.DatasetUtils;
-import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
-import org.apache.asterix.om.types.ARecordType;
-import org.apache.asterix.om.types.ATypeTag;
-import org.apache.asterix.om.types.BuiltinType;
-import org.apache.asterix.om.types.IAType;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.asterix.om.util.NonTaggedFormatUtil;
-import org.apache.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
-import org.apache.asterix.transaction.management.resource.ExternalBTreeLocalResourceMetadata;
-import org.apache.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
-import org.apache.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
-import org.apache.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
-import org.apache.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileStatus;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
-import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
-import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
-import org.apache.hyracks.api.dataflow.value.ITypeTraits;
-import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
-import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
-import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
-import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
-import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
-import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
-import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
-import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
-import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
-import org.apache.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
-import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
-import org.apache.hyracks.storage.common.file.LocalResource;
-
-public class ExternalIndexingOperations {
-
-    public static final List<List<String>> FILE_INDEX_FIELD_NAMES = new ArrayList<List<String>>();
-    public static final ArrayList<IAType> FILE_INDEX_FIELD_TYPES = new ArrayList<IAType>();
-
-    static {
-        FILE_INDEX_FIELD_NAMES.add(new ArrayList<String>(Arrays.asList("")));
-        FILE_INDEX_FIELD_TYPES.add(BuiltinType.ASTRING);
-    }
-
-    public static boolean isIndexible(ExternalDatasetDetails ds) {
-        String adapter = ds.getAdapter();
-        if (adapter.equalsIgnoreCase(ExternalDataConstants.ALIAS_HDFS_ADAPTER)) {
-            return true;
-        }
-        return false;
-    }
-
-    public static boolean isRefereshActive(ExternalDatasetDetails ds) {
-        return ds.getState() != ExternalDatasetTransactionState.COMMIT;
-    }
-
-    public static boolean isValidIndexName(String datasetName, String indexName) {
-        return (!datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX).equals(indexName));
-    }
-
-    public static String getFilesIndexName(String datasetName) {
-        return datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX);
-    }
-
-    public static int getRIDSize(Dataset dataset) {
-        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
-        return IndexingConstants.getRIDSize(dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT));
-    }
-
-    public static IBinaryComparatorFactory[] getComparatorFactories(Dataset dataset) {
-        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
-        return IndexingConstants.getComparatorFactories((dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT)));
-    }
-
-    public static IBinaryComparatorFactory[] getBuddyBtreeComparatorFactories() {
-        return IndexingConstants.getBuddyBtreeComparatorFactories();
-    }
-
-    public static ArrayList<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset)
-            throws AlgebricksException {
-        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
-        ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
-        try {
-            // Create the file system object
-            FileSystem fs = getFileSystemObject(datasetDetails.getProperties());
-            // Get paths of dataset
-            String path = datasetDetails.getProperties().get(ExternalDataConstants.KEY_PATH);
-            String[] paths = path.split(",");
-
-            // Add fileStatuses to files
-            for (String aPath : paths) {
-                FileStatus[] fileStatuses = fs.listStatus(new Path(aPath));
-                for (int i = 0; i < fileStatuses.length; i++) {
-                    int nextFileNumber = files.size();
-                    if (fileStatuses[i].isDirectory()) {
-                        listSubFiles(dataset, fs, fileStatuses[i], files);
-                    } else {
-                        files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
-                                fileStatuses[i].getPath().toUri().getPath(),
-                                new Date(fileStatuses[i].getModificationTime()), fileStatuses[i].getLen(),
-                                ExternalFilePendingOp.PENDING_NO_OP));
-                    }
-                }
-            }
-            // Close file system
-            fs.close();
-            if (files.size() == 0) {
-                throw new AlgebricksException("File Snapshot retrieved from external file system is empty");
-            }
-            return files;
-        } catch (Exception e) {
-            e.printStackTrace();
-            throw new AlgebricksException("Unable to get list of HDFS files " + e);
-        }
-    }
-
-    /* list all files under the directory
-     * src is expected to be a folder
-     */
-    private static void listSubFiles(Dataset dataset, FileSystem srcFs, FileStatus src, ArrayList<ExternalFile> files)
-            throws IOException {
-        Path path = src.getPath();
-        FileStatus[] fileStatuses = srcFs.listStatus(path);
-        for (int i = 0; i < fileStatuses.length; i++) {
-            int nextFileNumber = files.size();
-            if (fileStatuses[i].isDirectory()) {
-                listSubFiles(dataset, srcFs, fileStatuses[i], files);
-            } else {
-                files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
-                        fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i].getModificationTime()),
-                        fileStatuses[i].getLen(), ExternalFilePendingOp.PENDING_NO_OP));
-            }
-        }
-    }
-
-    public static FileSystem getFileSystemObject(Map<String, String> map) throws IOException {
-        Configuration conf = new Configuration();
-        conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_URI, map.get(ExternalDataConstants.KEY_HDFS_URL).trim());
-        conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_CLASS, DistributedFileSystem.class.getName());
-        return FileSystem.get(conf);
-    }
-
-    public static JobSpecification buildFilesIndexReplicationJobSpec(Dataset dataset,
-            ArrayList<ExternalFile> externalFilesSnapshot, AqlMetadataProvider metadataProvider, boolean createIndex)
-                    throws MetadataException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(), dataset.getDatasetName(),
-                        getFilesIndexName(dataset.getDatasetName()), true);
-        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
-        FilesIndexDescription filesIndexDescription = new FilesIndexDescription();
-        ILocalResourceMetadata localResourceMetadata = new ExternalBTreeLocalResourceMetadata(
-                filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS, filesIndexDescription.FILES_INDEX_COMP_FACTORIES,
-                new int[] { 0 }, false, dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties);
-        PersistentLocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
-                localResourceMetadata, LocalResource.ExternalBTreeResource);
-        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
-                mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-        ExternalFilesIndexOperatorDescriptor externalFilesOp = new ExternalFilesIndexOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                secondaryFileSplitProvider, indexDataflowHelperFactory, localResourceFactoryProvider,
-                externalFilesSnapshot, createIndex);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
-                secondarySplitsAndConstraint.second);
-        spec.addRoot(externalFilesOp);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    /**
-     * This method creates an indexing operator that indexes records in HDFS.
-     *
-     * @param jobSpec
-     * @param itemType
-     * @param dataset
-     * @param files
-     * @param indexerDesc
-     * @return
-     * @throws Exception
-     */
-    private static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> getExternalDataIndexingOperator(
-            JobSpecification jobSpec, IAType itemType, Dataset dataset, List<ExternalFile> files,
-            RecordDescriptor indexerDesc, AqlMetadataProvider metadataProvider) throws Exception {
-        ExternalDatasetDetails externalDatasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
-        Map<String, String> configuration = externalDatasetDetails.getProperties();
-        IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(externalDatasetDetails.getAdapter(),
-                configuration, (ARecordType) itemType, files, true);
-        return new Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint>(
-                new ExternalDataScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory),
-                adapterFactory.getPartitionConstraint());
-    }
-
-    public static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> createExternalIndexingOp(
-            JobSpecification spec, AqlMetadataProvider metadataProvider, Dataset dataset, ARecordType itemType,
-            RecordDescriptor indexerDesc, List<ExternalFile> files) throws Exception {
-        if (files == null) {
-            files = MetadataManager.INSTANCE.getDatasetExternalFiles(metadataProvider.getMetadataTxnContext(), dataset);
-        }
-        return getExternalDataIndexingOperator(spec, itemType, dataset, files, indexerDesc, metadataProvider);
-    }
-
-    /**
-     * At the end of this method, we expect to have four sets, as follows:
-     * metadataFiles should contain only the files that were appended, in their original state
-     * addedFiles should contain new files, with file numbers assigned starting after the max original file number
-     * deletedFiles should contain files that no longer exist in the file system
-     * appendedFiles should contain the updated file information for existing files that were appended to
-     * The method returns false whenever a delta is detected, and true when there is zero delta
-     *
-     * @param dataset
-     * @param metadataFiles
-     * @param addedFiles
-     * @param deletedFiles
-     * @param appendedFiles
-     * @return
-     * @throws MetadataException
-     * @throws AlgebricksException
-     */
-    public static boolean isDatasetUptodate(Dataset dataset, List<ExternalFile> metadataFiles,
-            List<ExternalFile> addedFiles, List<ExternalFile> deletedFiles, List<ExternalFile> appendedFiles)
-                    throws MetadataException, AlgebricksException {
-        boolean uptodate = true;
-        int newFileNumber = metadataFiles.get(metadataFiles.size() - 1).getFileNumber() + 1;
-
-        ArrayList<ExternalFile> fileSystemFiles = getSnapshotFromExternalFileSystem(dataset);
-
-        // Loop over file system files (taking care of added files)
-        for (ExternalFile fileSystemFile : fileSystemFiles) {
-            boolean fileFound = false;
-            Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
-            while (mdFilesIterator.hasNext()) {
-                ExternalFile metadataFile = mdFilesIterator.next();
-                if (fileSystemFile.getFileName().equals(metadataFile.getFileName())) {
-                    // Same file name
-                    if (fileSystemFile.getLastModefiedTime().equals(metadataFile.getLastModefiedTime())) {
-                        // Same timestamp
-                        if (fileSystemFile.getSize() == metadataFile.getSize()) {
-                            // Same size -> no op
-                            mdFilesIterator.remove();
-                            fileFound = true;
-                        } else {
-                            // Different size -> append op
-                            metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
-                            fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
-                            appendedFiles.add(fileSystemFile);
-                            fileFound = true;
-                            uptodate = false;
-                        }
-                    } else {
-                        // Same file name, Different file mod date -> delete and add
-                        metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
-                        deletedFiles
-                                .add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(), 0,
-                                        metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
-                                        metadataFile.getSize(), ExternalFilePendingOp.PENDING_DROP_OP));
-                        fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
-                        fileSystemFile.setFileNumber(newFileNumber);
-                        addedFiles.add(fileSystemFile);
-                        newFileNumber++;
-                        fileFound = true;
-                        uptodate = false;
-                    }
-                }
-                if (fileFound) {
-                    break;
-                }
-            }
-            if (!fileFound) {
-                // File not stored previously in metadata -> pending add op
-                fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
-                fileSystemFile.setFileNumber(newFileNumber);
-                addedFiles.add(fileSystemFile);
-                newFileNumber++;
-                uptodate = false;
-            }
-        }
-
-        // Done with files from external file system -> metadata files now contain both deleted files and appended ones
-        // first, correct number assignment to deleted and updated files
-        for (ExternalFile deletedFile : deletedFiles) {
-            deletedFile.setFileNumber(newFileNumber);
-            newFileNumber++;
-        }
-        for (ExternalFile appendedFile : appendedFiles) {
-            appendedFile.setFileNumber(newFileNumber);
-            newFileNumber++;
-        }
-
-        // include the remaining deleted files
-        Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
-        while (mdFilesIterator.hasNext()) {
-            ExternalFile metadataFile = mdFilesIterator.next();
-            if (metadataFile.getPendingOp() == ExternalFilePendingOp.PENDING_NO_OP) {
-                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
-                deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(),
-                        newFileNumber, metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
-                        metadataFile.getSize(), metadataFile.getPendingOp()));
-                newFileNumber++;
-                uptodate = false;
-            }
-        }
-        return uptodate;
-    }
-
-    public static Dataset createTransactionDataset(Dataset dataset) {
-        ExternalDatasetDetails originalDsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
-        ExternalDatasetDetails dsd = new ExternalDatasetDetails(originalDsd.getAdapter(), originalDsd.getProperties(),
-                originalDsd.getTimestamp(), ExternalDatasetTransactionState.BEGIN);
-        Dataset transactionDatset = new Dataset(dataset.getDataverseName(), dataset.getDatasetName(),
-                dataset.getItemTypeDataverseName(), dataset.getItemTypeName(), dataset.getNodeGroupName(),
-                dataset.getCompactionPolicy(), dataset.getCompactionPolicyProperties(), dsd, dataset.getHints(),
-                DatasetType.EXTERNAL, dataset.getDatasetId(), dataset.getPendingOp());
-        return transactionDatset;
-    }
-
-    public static boolean isFileIndex(Index index) {
-        return (index.getIndexName().equals(getFilesIndexName(index.getDatasetName())));
-    }
-
-    public static JobSpecification buildDropFilesIndexJobSpec(CompiledIndexDropStatement indexDropStmt,
-            AqlMetadataProvider metadataProvider, Dataset dataset) throws AlgebricksException, MetadataException {
-        String dataverseName = indexDropStmt.getDataverseName() == null ? metadataProvider.getDefaultDataverseName()
-                : indexDropStmt.getDataverseName();
-        String datasetName = indexDropStmt.getDatasetName();
-        String indexName = indexDropStmt.getIndexName();
-        boolean temp = dataset.getDatasetDetails().isTemp();
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataverseName, datasetName, indexName, true);
-        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-        IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                splitsAndConstraint.first,
-                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
-                        compactionInfo.first, compactionInfo.second,
-                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, !temp));
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop,
-                splitsAndConstraint.second);
-        spec.addRoot(btreeDrop);
-
-        return spec;
-    }
-
-    public static JobSpecification buildFilesIndexUpdateOp(Dataset ds, List<ExternalFile> metadataFiles,
-            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
-            AqlMetadataProvider metadataProvider) throws MetadataException, AlgebricksException {
-        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
-        for (ExternalFile file : metadataFiles) {
-            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
-                files.add(file);
-            } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
-                for (ExternalFile appendedFile : appendedFiles) {
-                    if (appendedFile.getFileName().equals(file.getFileName())) {
-                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(), file.getFileNumber(),
-                                file.getFileName(), file.getLastModefiedTime(), appendedFile.getSize(),
-                                ExternalFilePendingOp.PENDING_NO_OP));
-                    }
-                }
-            }
-        }
-        for (ExternalFile file : addedFiles) {
-            files.add(file);
-        }
-        Collections.sort(files);
-        return buildFilesIndexReplicationJobSpec(ds, files, metadataProvider, false);
-    }
-
-    public static JobSpecification buildIndexUpdateOp(Dataset ds, Index index, List<ExternalFile> metadataFiles,
-            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
-            AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
-        // Create files list
-        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
-
-        for (ExternalFile metadataFile : metadataFiles) {
-            if (metadataFile.getPendingOp() != ExternalFilePendingOp.PENDING_APPEND_OP) {
-                files.add(metadataFile);
-            } else {
-                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
-                files.add(metadataFile);
-            }
-        }
-        // add new files
-        for (ExternalFile file : addedFiles) {
-            files.add(file);
-        }
-        // add appended files
-        for (ExternalFile file : appendedFiles) {
-            files.add(file);
-        }
-
-        CompiledCreateIndexStatement ccis = new CompiledCreateIndexStatement(index.getIndexName(),
-                index.getDataverseName(), index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
-                index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
-        return IndexOperations.buildSecondaryIndexLoadingJobSpec(ccis, null, null, metadataProvider, files);
-    }
-
-    public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
-                metadataProvider.getMetadataTxnContext());
-        boolean temp = ds.getDatasetDetails().isTemp();
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                        getFilesIndexName(ds.getDatasetName()), temp);
-        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
-                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
-        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
-
-        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-
-        for (Index index : indexes) {
-            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
-                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
-                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                                index.getIndexName(), temp);
-                if (index.getIndexType() == IndexType.BTREE) {
-                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, spec));
-                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                } else if (index.getIndexType() == IndexType.RTREE) {
-                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
-                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                }
-            }
-        }
-
-        ExternalDatasetIndexesCommitOperatorDescriptor op = new ExternalDatasetIndexesCommitOperatorDescriptor(spec,
-                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
-                rtreeDataflowHelperFactories, rtreeInfos);
-
-        spec.addRoot(op);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
-                filesIndexSplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    private static ExternalBTreeDataflowHelperFactory getFilesIndexDataflowHelperFactory(Dataset ds,
-            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
-            AsterixStorageProperties storageProperties, JobSpecification spec) {
-        return new ExternalBTreeDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
-    }
-
-    private static ExternalBTreeWithBuddyDataflowHelperFactory getBTreeDataflowHelperFactory(Dataset ds, Index index,
-            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
-            AsterixStorageProperties storageProperties, JobSpecification spec) {
-        return new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
-                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
-    }
-
-    @SuppressWarnings("rawtypes")
-    private static ExternalRTreeDataflowHelperFactory getRTreeDataflowHelperFactory(Dataset ds, Index index,
-            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
-            AsterixStorageProperties storageProperties, AqlMetadataProvider metadataProvider, JobSpecification spec)
-                    throws AlgebricksException, AsterixException {
-        int numPrimaryKeys = getRIDSize(ds);
-        List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
-        secondaryKeyFields.size();
-        ARecordType itemType = (ARecordType) metadataProvider.findType(ds.getItemTypeDataverseName(),
-                ds.getItemTypeName());
-        Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0), itemType);
-        IAType spatialType = spatialTypePair.first;
-        if (spatialType == null) {
-            throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
-        }
-        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
-        int numNestedSecondaryKeyFields = numDimensions * 2;
-        IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
-        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
-
-        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys
-                + numNestedSecondaryKeyFields];
-        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
-        IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
-        ATypeTag keyType = nestedKeyType.getTypeTag();
-
-        keyType = nestedKeyType.getTypeTag();
-        for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
-            ISerializerDeserializer keySerde = AqlSerializerDeserializerProvider.INSTANCE
-                    .getSerializerDeserializer(nestedKeyType);
-            secondaryRecFields[i] = keySerde;
-
-            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE
-                    .getBinaryComparatorFactory(nestedKeyType, true);
-            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
-            valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
-        }
-        // Add serializers and comparators for primary index fields.
-        for (int i = 0; i < numPrimaryKeys; i++) {
-            secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
-            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
-        }
-        int[] primaryKeyFields = new int[numPrimaryKeys];
-        for (int i = 0; i < primaryKeyFields.length; i++) {
-            primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
-        }
-
-        return new ExternalRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
-                getBuddyBtreeComparatorFactories(), mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
-                AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
-                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
-    }
-
-    public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-
-        boolean temp = ds.getDatasetDetails().isTemp();
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                        getFilesIndexName(ds.getDatasetName()), temp);
-        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
-                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
-        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
-
-        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-
-        for (Index index : indexes) {
-            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
-                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
-                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                                index.getIndexName(), temp);
-                if (index.getIndexType() == IndexType.BTREE) {
-                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, spec));
-                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                } else if (index.getIndexType() == IndexType.RTREE) {
-                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
-                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                }
-            }
-        }
-
-        ExternalDatasetIndexesAbortOperatorDescriptor op = new ExternalDatasetIndexesAbortOperatorDescriptor(spec,
-                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
-                rtreeDataflowHelperFactories, rtreeInfos);
-
-        spec.addRoot(op);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
-                filesIndexSplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-
-    }
-
-    public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
-            throws AlgebricksException, AsterixException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        boolean temp = ds.getDatasetDetails().isTemp();
-
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                        getFilesIndexName(ds.getDatasetName()), temp);
-        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
-                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
-        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
-
-        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
-        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
-
-        for (Index index : indexes) {
-            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
-                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
-                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
-                                index.getIndexName(), temp);
-                if (index.getIndexType() == IndexType.BTREE) {
-                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, spec));
-                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                } else if (index.getIndexType() == IndexType.RTREE) {
-                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
-                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
-                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
-                }
-            }
-        }
-
-        ExternalDatasetIndexesRecoverOperatorDescriptor op = new ExternalDatasetIndexesRecoverOperatorDescriptor(spec,
-                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
-                rtreeDataflowHelperFactories, rtreeInfos);
-
-        spec.addRoot(op);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
-                filesIndexSplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-
-    public static JobSpecification compactFilesIndexJobSpec(Dataset dataset, AqlMetadataProvider metadataProvider)
-            throws MetadataException, AlgebricksException {
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
-        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
-        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
-                metadataProvider.getMetadataTxnContext());
-        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
-        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
-        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
-                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(), dataset.getDatasetName(),
-                        getFilesIndexName(dataset.getDatasetName()), true);
-        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
-        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
-                mergePolicyFactory, mergePolicyFactoryProperties,
-                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
-                storageProperties.getBloomFilterFalsePositiveRate(),
-                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
-        FilesIndexDescription filesIndexDescription = new FilesIndexDescription();
-        LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
-                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
-                secondaryFileSplitProvider, filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
-                filesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, indexDataflowHelperFactory,
-                NoOpOperationCallbackFactory.INSTANCE);
-        spec.addRoot(compactOp);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
-                secondarySplitsAndConstraint.second);
-        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
-        return spec;
-    }
-}
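
The isDatasetUptodate javadoc above describes how a fresh HDFS snapshot is classified against the files recorded in metadata. Below is a compressed sketch of just those matching rules, not the full method: it reuses the ExternalFile accessors and ExternalFilePendingOp values from the deleted code, but omits file-number assignment and the deletedFiles bookkeeping, and the class and method names are illustrative assumptions.

import java.util.Iterator;
import java.util.List;

import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
import org.apache.asterix.external.indexing.ExternalFile;

public class SnapshotDeltaSketch {
    // Illustrative only: classifies a file-system snapshot against metadata files,
    // following the decision rules of the deleted isDatasetUptodate method.
    // Returns true only when no delta was found.
    public static boolean classify(List<ExternalFile> snapshot, List<ExternalFile> metadataFiles,
            List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles) {
        boolean uptodate = true;
        for (ExternalFile fsFile : snapshot) {
            boolean found = false;
            Iterator<ExternalFile> it = metadataFiles.iterator();
            while (it.hasNext() && !found) {
                ExternalFile mdFile = it.next();
                if (!fsFile.getFileName().equals(mdFile.getFileName())) {
                    continue;
                }
                found = true;
                if (fsFile.getLastModefiedTime().equals(mdFile.getLastModefiedTime())) {
                    if (fsFile.getSize() == mdFile.getSize()) {
                        it.remove(); // same name, timestamp and size: unchanged, no pending op
                    } else {
                        fsFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP); // grew in place
                        appendedFiles.add(fsFile);
                        uptodate = false;
                    }
                } else {
                    // modification time changed: the full method also records a drop for the
                    // old metadata entry (omitted here) before re-adding the new version
                    fsFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
                    addedFiles.add(fsFile);
                    uptodate = false;
                }
            }
            if (!found) {
                // not previously recorded in metadata: pending add
                fsFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
                addedFiles.add(fsFile);
                uptodate = false;
            }
        }
        return uptodate;
    }
}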


[20/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm
new file mode 100644
index 0000000..3b2225d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser/classad-parser.1.adm
@@ -0,0 +1,100 @@
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#48968872.0#1445354636", "StatsLifetimeStarter": 572059, "JobStartDate": 1445362267, "SubmitEventNotes": "DAG Node: fabp4-0002+fabp4-0002", "JobStatus": 4, "LeaveJobInQueue": false, "WantGlidein": true, "StartdPrincipal": "execute-side@matchsession/128.104.119.175", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445561276, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "ScheddBday": 1445383086, "RemoteWallClockTime": 769511.0d, "WantCheckpoint": false, "In": "/dev/null", "LastVacateTime": 1445546251, "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 32543, "EnteredCurrentStatus": 1446133322, "ResidentSetSize_RAW": 100432, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/ssericksen/dude-14-xdock/ChtcRun/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID
 : 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 571737.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 690056, "BytesSent": 3113566.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133322, "ProcId": 0, "ImageSize": 750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 12, "RecentBlockReads": 0, "SpooledOutputFiles": "ChtcWrapperfabp4-0002.out,AuditLog.fabp4-0002,poses.mol2,CURLTIME_4057178,harvest.log,time_elapsed.log,surf_scores.txt,CURLTIME_38803,count.log,fabp4-0002.out,CURLTIME_253463", "NumJobReconnects": 1, "WantFlocking": true, "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "JobCurrentStartExecutingDate": 1445561278, "ExitBySignal": false, "LastMatch
 Time": 1445561276, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 6, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 48940805, "MemoryUsage": 122, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 6, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 572046, "ExecutableSize_RAW": 6, "LastRejMatchReason": "no match found", "LastSuspensionTime": 0, "UserLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/fabp4-0002/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 769511.0d, "LastJobLeaseRenewal": 1446133322, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 8.7351688E7d, "Condor
 Platform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "fabp4-0002+fabp4-0002", "PeriodicRelease": "error", "JobRunCount": 7, "LastRemotePool": "condor.biochem.wisc.edu:9618?sock=collector", "JobLastStartDate": 1445546257, "LastRemoteHost": "slot1@cluster-0008.biochem.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 0.0d, "TransferInput": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/fabp4-0002/,/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-in/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133322, "StreamErr": false, "is_resumable": true, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) 
 ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 7, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-surf-out-esr1/./mydag.dag.nodes.log", "Owner": "ssericksen", "Requirements": "undefined", "DiskUsage": 35000, "LastRejMatchTime": 1445375317, "JobLeaseDuration": 2400, "ClusterId": 48968872, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 572046.0d, "Args": "--type=Other --cmdtorun=surflex_run_DUDE_v1.8_esr1.sh --unique=fabp4-0002 --", "Environment": "", "LastPublicClaimId": "<128.104.119.175:9618>#1444067179#3317#...", "Iwd": "/home/ssericksen/dude-14-xdock/ChtcRun/dude14-sur
 f-out-esr1/fabp4-0002", "QDate": 1445354636, "CurrentHosts": 0, "User": "ssericksen@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49573720.0#1445938922", "StatsLifetimeStarter": 190245, "JobStartDate": 1445943852, "SubmitEventNotes": "DAG Node: 180+180", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.72", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1445943852, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 125000, "RemoteWallClockTime": 190247.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446134099, "ResidentSetSize_RAW": 123680, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 
 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 185236.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30766.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446134099, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1445943853, "ExitBySignal": false, "LastMatchTime": 1445943852, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49572657, "MemoryUsage": 122, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 190247, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally/Simulation_condor/model_3/180/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 190247.0d, "LastJobLeaseRenewal": 1446134099, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 284367.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "180+180", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e272.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 1835.0d, "TransferInput": "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446134099, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally/Simulation
 _condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49573720, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 190247.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.72:29075>#1444753997#6000#...", "Iwd": "/home/xguo23/finally/Simulation_condor/model_3/180", "QDate": 1445938922, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581952.0#1446105329", "StatsLifetimeStarter": 27674, "JobStartDate": 1446106061, "SubmitEventNotes": "DAG Node: 40+40", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.86", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106061, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27676.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133737, "ResidentSetSize_RAW": 127252, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 10
 23 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27510.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30584.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133737, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_369560,ChtcWrapper40.out,AuditLog.40,simu_3_40.txt,harvest.log,40.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106063, "ExitBySignal": false, "LastMatchTime": 1446106061, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( 
 JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27676, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/40/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27676.0d, "LastJobLeaseRenewal": 1446133737, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "40+40", "PeriodicReleas
 e": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e286.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 105.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/40/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133737, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor
 /model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581952, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27676.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=40 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.86:32129>#1444759888#6329#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/40", "QDate": 1446105329, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581985.0#1446105368", "StatsLifetimeStarter": 26354, "JobStartDate": 1446106289, "SubmitEventNotes": "DAG Node: 36+36", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.244.249", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106289, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26357.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132646, "ResidentSetSize_RAW": 127452, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 1
 023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26239.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31898.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132646, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1151700,ChtcWrapper36.out,AuditLog.36,simu_3_36.txt,harvest.log,36.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106289, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=
 ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26357, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/36/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26357.0d, "LastJobLeaseRenewal": 1446132646, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "36+36", "PeriodicRele
 ase": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e457.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/36/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132646, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condo
 r/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581985, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26357.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=36 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.244.249:28476>#1444685646#10655#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/36", "QDate": 1446105368, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49581989.0#1446105374", "StatsLifetimeStarter": 27490, "JobStartDate": 1446106290, "SubmitEventNotes": "DAG Node: 82+82", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.233", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106290, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 27491.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133781, "ResidentSetSize_RAW": 126932, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( 
 ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 27288.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30553.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partitio
 nableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,
 Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expect
 edMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_Jo
 bStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDra
 iningCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Resident
 SetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133782, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_4096502,ChtcWrapper82.out,AuditLog.82,simu_3_82.txt,harvest.log,82.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106291, "ExitBySignal": false, "LastMatchTime": 1446106290, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserC
 pu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27491, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/82/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27491.0d, "LastJobLeaseRenewal": 1446133781, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285053.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "82+82", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e433.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 173.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/82/,/home/xguo23/finally_2/Simulation_condor/data
 /shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133781, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49581989, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27491.0d, "Args":
  "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=82 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.233:28601>#1443991451#13496#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/82", "QDate": 1446105374, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582049.0#1446105441", "StatsLifetimeStarter": 26296, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 112+112", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.245", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26298.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132780, "ResidentSetSize_RAW": 126892, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26097.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31904.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132780, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2601607,ChtcWrapper112.out,AuditLog.112,simu_3_112.txt,harvest.log,112.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26298, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/112/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26298.0d, "LastJobLeaseRenewal": 1446132780, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "112+112", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e445.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 164.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/112/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132780, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582049, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26298.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=112 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.245:48407>#1443991450#14631#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/112", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582050.0#1446105441", "StatsLifetimeStarter": 27141, "JobStartDate": 1446106482, "SubmitEventNotes": "DAG Node: 301+301", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.172", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446106482, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 27143.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133625, "ResidentSetSize_RAW": 126464, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26895.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31905.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133625, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_2158419,ChtcWrapper301.out,AuditLog.301,simu_3_301.txt,harvest.log,301.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446106484, "ExitBySignal": false, "LastMatchTime": 1446106482, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 27143, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/301/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 27143.0d, "LastJobLeaseRenewal": 1446133625, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "301+301", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e372.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 201.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/301/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133625, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582050, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 27143.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=301 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.172:19856>#1444760019#9307#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/301", "QDate": 1446105441, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582065.0#1446105458", "StatsLifetimeStarter": 25606, "JobStartDate": 1446107042, "SubmitEventNotes": "DAG Node: 401+401", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.206", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107042, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 25607.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132649, "ResidentSetSize_RAW": 126608, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25478.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30661.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partit
 ionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBus
 y,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expe
 ctedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_
 JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulD
 rainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Reside
 ntSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132649, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1160521,ChtcWrapper401.out,AuditLog.401,simu_3_401.txt,harvest.log,401.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107043, "ExitBySignal": false, "LastMatchTime": 1446107042, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Loca
 lUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25607, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/401/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25607.0d, "LastJobLeaseRenewal": 1446132649, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "401+401", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e406.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 89.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/401/,/home/xguo23/finally_2/Simulation_co
 ndor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132649, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582065, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25607.0d
 , "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=401 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.206:27946>#1443991437#15826#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/401", "QDate": 1446105458, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582094.0#1446105491", "StatsLifetimeStarter": 25168, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 106+106", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.104.55.83", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 75000, "RemoteWallClockTime": 25169.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 4, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132658, "ResidentSetSize_RAW": 72016, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize + 102
 3 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24949.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 119520, "BytesSent": 30486.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 1, "JobFinishedHookDone": 1446132658, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 86, "SpooledOutputFiles": "CURLTIME_122139,ChtcWrapper106.out,AuditLog.106,simu_3_106.txt,harvest.log,106.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107490, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 665, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "exp
 r=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 26620, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 25169, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/106/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 25169.0d, "LastJobLeaseRenewal": 1446132658, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "106+106", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c064.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 204.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/106/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132658, "StreamErr": false, "RecentBlockReadKbytes": 960, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Sim
 ulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582094, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 25169.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=106 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.83:25899>#1445308581#1240#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/106", "QDate": 1446105491, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582098.0#1446105492", "StatsLifetimeStarter": 26020, "JobStartDate": 1446107489, "SubmitEventNotes": "DAG Node: 304+304", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.223", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107489, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26022.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133511, "ResidentSetSize_RAW": 128776, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25844.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 31801.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133511, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_3651606,ChtcWrapper304.out,AuditLog.304,simu_3_304.txt,harvest.log,304.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107489, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": 
 "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26022, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/304/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26022.0d, "LastJobLeaseRenewal": 1446133511, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "304+304", "Per
 iodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e423.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 143.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/304/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133511, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simul
 ation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582098, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26022.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=304 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.223:13467>#1444760039#6376#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/304", "QDate": 1446105492, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582113.0#1446105509", "StatsLifetimeStarter": 26044, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 206+206", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 26045.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133535, "ResidentSetSize_RAW": 126460, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25939.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30596.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133535, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_522843,ChtcWrapper206.out,AuditLog.206,simu_3_206.txt,harvest.log,206.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "
 expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26045, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/206/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26045.0d, "LastJobLeaseRenewal": 1446133535, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "206+206", "Peri
 odicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e320.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 87.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/206/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133535, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulat
 ion_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582113, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26045.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=206 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.120:45185>#1443991409#14238#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/206", "QDate": 1446105509, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582119.0#1446105519", "StatsLifetimeStarter": 24928, "JobStartDate": 1446107490, "SubmitEventNotes": "DAG Node: 152+152", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.242", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107490, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 24930.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132420, "ResidentSetSize_RAW": 128972, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 24742.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30431.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132420, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_674,ChtcWrapper152.out,AuditLog.152,simu_3_152.txt,harvest.log,152.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107491, "ExitBySignal": false, "LastMatchTime": 1446107490, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "exp
 r=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "LocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24930, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/152/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24930.0d, "LastJobLeaseRenewal": 1446132420, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "152+152", "Periodi
 cRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e442.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 156.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/152/,/home/xguo23/finally_2/Simulation_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132420, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulatio
 n_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582119, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 24930.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=152 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.242:38884>#1443991450#10374#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/152", "QDate": 1446105519, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582124.0#1446105525", "StatsLifetimeStarter": 24745, "JobStartDate": 1446107685, "SubmitEventNotes": "DAG Node: 323+323", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 13, "StartdPrincipal": "execute-side@matchsession/128.104.55.89", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107685, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 75000, "RemoteWallClockTime": 24748.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446132433, "ResidentSetSize_RAW": 71248, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( 
 ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 21145.0d, "BlockWrites": 1, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 118000, "BytesSent": 30560.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partitiona
 bleSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Sl
 ot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expected
 MachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobS
 tarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrain
 ingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSe
 tSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446132434, "ProcId": 0, "ImageSize": 125000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 314, "SpooledOutputFiles": "harvest.log,CURLTIME_3853266,ChtcWrapper323.out,AuditLog.323,simu_3_323.txt,323.out", "BlockWriteKbytes": 4, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107686, "ExitBySignal": false, "LastMatchTime": 1446107685, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 1142, "DAGManJobId": 49581933, "MemoryUsage": 73, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 43788, "NumJobMatches": 1, "L
 ocalUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 24748, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/323/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 24748.0d, "LastJobLeaseRenewal": 1446132433, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "323+323", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@c070.chtc.wisc.edu", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 175.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/323/,/home/xguo23/finally_2/Simulatio
 n_condor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446132433, "StreamErr": false, "RecentBlockReadKbytes": 4224, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582124, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 2
 4748.0d, "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=323 -- 3", "Environment": "", "LastPublicClaimId": "<128.104.55.89:32652>#1445371750#1302#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/323", "QDate": 1446105525, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582148.0#1446105547", "StatsLifetimeStarter": 26230, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 162+162", "JobStatus": 4, "LeaveJobInQueue": false, "AutoClusterId": 24, "StartdPrincipal": "execute-side@matchsession/128.105.245.170", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "ExitStatus": 0, "Rank": 0.0d, "ResidentSetSize": 150000, "RemoteWallClockTime": 26233.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133919, "ResidentSetSize_RAW": 126384, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=
 ( ( ResidentSetSize + 1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 26088.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30612.0d, "CumulativeSuspensionTime": 0, "TransferIn": false, "AutoClusterAttrs": "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,Partit
 ionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBus
 y,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_Expe
 ctedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_
 JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulD
 rainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,Reside
 ntSetSize", "NumCkpts": 0, "Err": "process.err", "RecentBlockWrites": 0, "JobFinishedHookDone": 1446133919, "ProcId": 0, "ImageSize": 1000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "TransferInputSizeMB": 0, "RecentBlockReads": 0, "SpooledOutputFiles": "CURLTIME_1114551,ChtcWrapper162.out,AuditLog.162,simu_3_162.txt,harvest.log,162.out", "BlockWriteKbytes": 0, "WhenToTransferOutput": "ON_EXIT", "JobCurrentStartExecutingDate": 1446107688, "ExitBySignal": false, "LastMatchTime": 1446107686, "OnExitHold": false, "OrigMaxHosts": 1, "RequestMemory": 1000, "NumJobStarts": 1, "TerminationPending": true, "TotalSuspensions": 0, "BlockReads": 0, "DAGManJobId": 49581933, "MemoryUsage": 146, "PeriodicReleaseExpr": "expr=( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); ", "ExitCode": 0, "JobNotification": 0, "BlockReadKbytes": 0, "NumJobMatches": 1, "Loca
 lUserCpu": 0.0d, "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 26233, "ExecutableSize_RAW": 6, "LastSuspensionTime": 0, "Matlab": "R2011b", "UserLog": "/home/xguo23/finally_2/Simulation_condor/model_3/162/process.log", "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "CumulativeSlotTime": 26233.0d, "LastJobLeaseRenewal": 1446133919, "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "BytesRecvd": 285054.0d, "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "JOBGLIDEIN_ResourceName": "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])", "DAGNodeName": "162+162", "PeriodicRelease": "error", "JobRunCount": 1, "LastRemoteHost": "slot1@e370.chtc.WISC.EDU", "JobPrio": 0, "LocalSysCpu": 0.0d, "ExecutableSize": 7, "RemoteSysCpu": 96.0d, "TransferInput": "/home/xguo23/finally_2/Simulation_condor/data/162/,/home/xguo23/finally_2/Simulation_co
 ndor/data/shared/", "PeriodicHold": false, "WantRemoteIO": true, "CommittedSuspensionTime": 0, "DAGParentNodeNames": "", "CompletionDate": 1446133919, "StreamErr": false, "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "RequirementsExpr": "expr=( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == \"CHTC\" || TARGET.COLLECTOR_HOST_STRING == \"infopool.cs.wisc.edu\" ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); ", "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DAGManNodesLog": "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log", "Owner": "xguo23", "Requirements": "undefined", "DiskUsage": 1250000, "JobLeaseDuration": 2400, "ClusterId": 49582148, "BufferSize": 524288, "IsCHTCSubmit": true, "RecentStatsLifetimeStarter": 1200, "CommittedSlotTime": 26233.0d
 , "Args": "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=162 -- 3", "Environment": "", "LastPublicClaimId": "<128.105.245.170:9482>#1443991414#13008#...", "Iwd": "/home/xguo23/finally_2/Simulation_condor/model_3/162", "QDate": 1446105547, "CurrentHosts": 0, "User": "xguo23@chtc.wisc.edu", "StreamOut": false }
+{ "GlobalJobId": "submit-3.chtc.wisc.edu#49582154.0#1446105553", "StatsLifetimeStarter": 25874, "JobStartDate": 1446107686, "SubmitEventNotes": "DAG Node: 333+333", "JobStatus": 4, "LeaveJobInQueue": false, "StartdPrincipal": "execute-side@matchsession/128.105.245.120", "WantRHEL6": true, "OnExitRemove": true, "JobCurrentStartDate": 1446107686, "CoreSize": 0, "MATCH_EXP_JOBGLIDEIN_ResourceName": "wisc.edu", "Rank": 0.0d, "ExitStatus": 0, "ResidentSetSize": 150000, "RemoteWallClockTime": 25876.0d, "WantCheckpoint": false, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "RecentBlockWriteKbytes": 0, "DiskUsage_RAW": 1216669, "EnteredCurrentStatus": 1446133562, "ResidentSetSize_RAW": 125740, "RequestDisk": 1000000, "MyType": "Job", "PeriodicRemove": false, "Cmd": "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper", "CondorVersion": "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $", "ShouldTransferFiles": "YES", "MemoryUsageExpr": "expr=( ( ResidentSetSize +
  1023 ) / 1024 ); ", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "RemoteUserCpu": 25692.0d, "BlockWrites": 0, "NiceUser": false, "Out": "process.out", "ImageSize_RAW": 811948, "BytesSent": 30542.0d, "CumulativeSuspensionTime": 0, "Trans

<TRUNCATED>


[12/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Lexer.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Lexer.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Lexer.java
new file mode 100644
index 0000000..97b7ea8
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Lexer.java
@@ -0,0 +1,962 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.classad.Value.NumberFactor;
+
+public class Lexer {
+
+    public static final char[] TRUE_CHAR_ARRAY = "true".toCharArray();
+    public static final char[] FALSE_CHAR_ARRAY = "false".toCharArray();
+    public static final char[] UNDEFINED_CHAR_ARRAY = "undefined".toCharArray();
+    public static final char[] ERROR_CHAR_ARRAY = "error".toCharArray();
+    public static final char[] IS_CHAR_ARRAY = "is".toCharArray();
+    public static final char[] ISNT_CHAR_ARRAY = "isnt".toCharArray();
+
+    public static final char EOF = (char) -1;
+    // internal state of lexical analyzer
+    protected boolean initialized;
+    private TokenType tokenType; // the type of the current token
+    private LexerSource lexSource;
+    private char ch; // the current character
+    private boolean accumulating; // are we in a token?
+    private final boolean debug = false; // debug flag
+    // internal buffer for token accumulation
+    private AMutableCharArrayString lexBuffer;
+
+    // cached last token
+    private TokenValue yylval; // the token itself
+    private boolean tokenConsumed; // has the token been consumed?
+
+    public enum TokenType {
+        LEX_TOKEN_ERROR,
+        LEX_END_OF_INPUT,
+        LEX_TOKEN_TOO_LONG,
+        LEX_INTEGER_VALUE,
+        LEX_REAL_VALUE,
+        LEX_BOOLEAN_VALUE,
+        LEX_STRING_VALUE,
+        LEX_UNDEFINED_VALUE,
+        LEX_ERROR_VALUE,
+        LEX_IDENTIFIER,
+        LEX_SELECTION,
+        LEX_MULTIPLY,
+        LEX_DIVIDE,
+        LEX_MODULUS,
+        LEX_PLUS,
+        LEX_MINUS,
+        LEX_BITWISE_AND,
+        LEX_BITWISE_OR,
+        LEX_BITWISE_NOT,
+        LEX_BITWISE_XOR,
+        LEX_LEFT_SHIFT,
+        LEX_RIGHT_SHIFT,
+        LEX_URIGHT_SHIFT,
+        LEX_LOGICAL_AND,
+        LEX_LOGICAL_OR,
+        LEX_LOGICAL_NOT,
+        LEX_LESS_THAN,
+        LEX_LESS_OR_EQUAL,
+        LEX_GREATER_THAN,
+        LEX_GREATER_OR_EQUAL,
+        LEX_EQUAL,
+        LEX_NOT_EQUAL,
+        LEX_META_EQUAL,
+        LEX_META_NOT_EQUAL,
+        LEX_BOUND_TO,
+        LEX_QMARK,
+        LEX_COLON,
+        LEX_COMMA,
+        LEX_SEMICOLON,
+        LEX_OPEN_BOX,
+        LEX_CLOSE_BOX,
+        LEX_OPEN_PAREN,
+        LEX_CLOSE_PAREN,
+        LEX_OPEN_BRACE,
+        LEX_CLOSE_BRACE,
+        LEX_BACKSLASH,
+        LEX_ABSOLUTE_TIME_VALUE,
+        LEX_RELATIVE_TIME_VALUE
+    };
+
+    public Lexer() {
+        // initialize lexer state (token, etc.) variables
+        tokenType = TokenType.LEX_END_OF_INPUT;
+        ch = 0;
+        tokenConsumed = true;
+        accumulating = false;
+        initialized = false;
+        yylval = new TokenValue();
+        return;
+    }
+
+    // Initialization method:  initialize with a LexerSource
+    //   +  Tokens will be accumulated in the lexBuffer
+    public boolean initialize(LexerSource source) throws IOException {
+        lexSource = source;
+        ch = lexSource.readCharacter();
+        // token state initialization
+        if (lexBuffer != null) {
+            lexBuffer.reset();
+        } else {
+            lexBuffer = new AMutableCharArrayString();
+        }
+        lexBuffer.setChar(0, ch);
+        lexBuffer.setLength(0);
+        //lexBufferCount = 0;
+        tokenConsumed = true;
+        accumulating = false;
+        initialized = true;
+        return true;
+    }
+
+    public boolean reinitialize() throws IOException {
+        ch = lexSource.readCharacter();
+        // token state initialization
+        lexBuffer.setChar(0, ch);
+        lexBuffer.setLength(0);
+        tokenConsumed = true;
+        accumulating = false;
+        return true;
+    }
+
+    public boolean wasInitialized() {
+        return initialized;
+    }
+
+    // FinishedParse:  This function implements the cleanup phase of a parse.
+    //   String valued tokens are entered into a string space, and maintained
+    //   with reference counting.  When a parse is finished, this space is flushed
+    //   out.
+    public void finishedParse() {
+        accumulating = false;
+        return;
+    }
+
+    // Mark:  This function is called when the beginning of a token is detected
+    public void mark() {
+        lexBuffer.setChar(0, ch);
+        lexBuffer.setLength(1);
+        accumulating = true;
+        return;
+    }
+
+    // Cut:  This function is called when the end of a token is detected
+    public void cut() {
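+        // the look-ahead character that follows the token has already been
+        // accumulated, so trim it off the end of the buffer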
+        lexBuffer.decrementLength();
+        accumulating = false;
+        return;
+    }
+
+    // Wind:  This function is called when an additional character must be read
+    //            from the input source; the conceptual action is to move the cursor
+    public void wind() throws IOException {
+        if (ch == EOF) {
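+            // at end of input there is no look-ahead character to append; grow the
+            // length anyway so that cut(), which always trims one character,
+            // does not drop the last real token character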
+            if (accumulating) {
+                lexBuffer.incrementLength();
+            }
+            return;
+        }
+        ch = lexSource.readCharacter();
+        if (ch == EOF) {
+            if (accumulating) {
+                lexBuffer.incrementLength();
+            }
+            return;
+        }
+        if (accumulating) {
+            lexBuffer.appendChar(ch);
+        }
+    }
+
+    public TokenType consumeToken() throws IOException {
+        return consumeToken(null);
+    }
+
+    public TokenType consumeToken(TokenValue lvalp) throws IOException {
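+        // hand back the cached token value first; if that token has already been
+        // consumed, peekToken() below scans a fresh one and updates lvalp again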
+        if (lvalp != null) {
+            lvalp.copyFrom(yylval);
+        }
+        // if a token has already been consumed, get another token
+        if (tokenConsumed) {
+            peekToken(lvalp);
+        }
+        if (debug) {
+            System.out.printf("Consume: %s\n", strLexToken(tokenType));
+        }
+
+        tokenConsumed = true;
+        return tokenType;
+    }
+
+    private boolean isxdigit(char ch) {
+        return Character.isDigit(ch) || isLowerCaseHexaAlpha(ch) || isUpperCaseHexaAlpha(ch);
+    }
+
+    private boolean isUpperCaseHexaAlpha(char ch) {
+        return ch >= 'A' && ch <= 'F';
+    }
+
+    private boolean isLowerCaseHexaAlpha(char ch) {
+        return ch >= 'a' && ch <= 'f';
+    }
+
+    public TokenType peekToken() throws IOException {
+        return peekToken(null);
+    }
+
+    // peekToken() returns the same token till consumeToken() is called
+    public TokenType peekToken(TokenValue lvalp) throws IOException {
+        /*if (lvalp == null) {
+            System.err.println("Null value passed to peekToken");
+            return null;
+        }*/
+        if (!tokenConsumed) {
+
+            if (lvalp != null) {
+                lvalp.copyFrom(yylval);
+            }
+            return tokenType;
+        }
+
+        // Set the token to unconsumed
+        tokenConsumed = false;
+
+        // consume white space and comments
+        while (true) {
+            if (Character.isWhitespace(ch)) {
+                wind();
+                continue;
+            } else if (ch == '/') {
+                mark();
+                wind();
+                if (ch == '/') {
+                    // a c++ style comment
+                    while (ch > 0 && ch != EOF && ch != '\n') {
+                        wind();
+                    }
+                } else if (ch == '*') {
+                    // a c style comment
+                    int oldCh;
+                    ch = '\n';
+                    do {
+                        oldCh = ch;
+                        wind();
+                    } while ((oldCh != '*' || ch != '/') && (ch > 0) && (ch != EOF));
+                    if (ch == EOF) {
+                        tokenType = TokenType.LEX_TOKEN_ERROR;
+                        return (tokenType);
+                    }
+                    wind();
+                } else {
+                    // just a division operator
+                    cut();
+                    tokenType = TokenType.LEX_DIVIDE;
+                    yylval.setTokenType(tokenType);
+                    return (tokenType);
+                }
+            } else {
+                break; // out of while( true ) loop
+            }
+        }
+
+        // check if this is the end of the input
+        if (ch == EOF) {
+            tokenType = TokenType.LEX_END_OF_INPUT;
+            yylval.setTokenType(tokenType);
+            return tokenType;
+        }
+
+        // check the first character of the token
+        if (ch == '-') {
+            // Depending on the last token we saw, a minus may be the start
+            // of an integer or real token. tokenizeNumber() does the right
+            // thing if there is no subsequent integer or real.
+            switch (tokenType) {
+                case LEX_INTEGER_VALUE:
+                case LEX_REAL_VALUE:
+                case LEX_BOOLEAN_VALUE:
+                case LEX_STRING_VALUE:
+                case LEX_UNDEFINED_VALUE:
+                case LEX_ERROR_VALUE:
+                case LEX_IDENTIFIER:
+                case LEX_SELECTION:
+                case LEX_CLOSE_BOX:
+                case LEX_CLOSE_PAREN:
+                case LEX_CLOSE_BRACE:
+                case LEX_BACKSLASH:
+                case LEX_ABSOLUTE_TIME_VALUE:
+                case LEX_RELATIVE_TIME_VALUE:
+                    tokenizePunctOperator();
+                    break;
+                default:
+                    tokenizeNumber();
+                    break;
+            }
+        } else if (Character.isDigit(ch) || ch == '.') {
+            // tokenizeNumber() also takes care of the selection operator
+            tokenizeNumber();
+
+        } else if (Character.isAlphabetic(ch) || ch == '_') {
+            tokenizeAlphaHead();
+        } else if (ch == '\"') {
+            tokenizeString('\"'); // its a string literal
+        } else if (ch == '\'') {
+            tokenizeString('\''); // its a quoted attribute
+        }
+
+        else {
+            tokenizePunctOperator();
+        }
+
+        if (debug) {
+            System.out.printf("Peek: %s\n", strLexToken(tokenType));
+            if (tokenType == TokenType.LEX_ERROR_VALUE) {
+                System.out.println("Lexer problem");
+            }
+        }
+        if (lvalp != null) {
+            lvalp.copyFrom(yylval);
+        }
+        yylval.setTokenType(tokenType);
+        return tokenType;
+    }
+
+    // Tokenize number constants:
+    //   1.  Integers:  [-] 0[0-7]+ | 0[xX][0-9a-fA-F]+ | [0-9]+
+    //   2.  Reals   :  [-] [0-9]*\.[0-9]* ( (e|E) [+-]? [0-9]+ )?
+    enum NumberType {
+        NONE,
+        INTEGER,
+        REAL
+    };
+
+    public TokenType tokenizeNumber() throws IOException {
+        NumberType numberType = NumberType.NONE;
+        NumberFactor f;
+        long integer = 0;
+        double real = 0;
+        int och;
+
+        och = ch;
+        mark();
+        wind();
+
+        if (och == '-') {
+            // This may be a negative number or the unary minus operator
+            // The subsequent two characters will tell us which.
+            if (Character.isDigit(ch)) {
+                // It looks like a negative number, keep reading.
+                och = ch;
+                wind();
+            } else if (ch == '.') {
+                // This could be a real number or an attribute reference
+                // starting with dot. Look at the second character.
+                char ch2 = lexSource.readCharacter();
+                if (ch2 != EOF) {
+                    lexSource.unreadCharacter();
+                }
+                if (!Character.isDigit(ch2)) {
+                    // It's not a real number, return a minus token.
+                    cut();
+                    tokenType = TokenType.LEX_MINUS;
+                    return tokenType;
+                }
+                // It looks like a negative real, keep reading.
+            } else {
+                // It's not a number, return a minus token.
+                cut();
+                tokenType = TokenType.LEX_MINUS;
+                return tokenType;
+            }
+        }
+
+        if (och == '0') {
+            // number is octal, hex or real
+            if (Character.toLowerCase(ch) == 'x') {
+                // get hex digits only; parse hex-digit+
+                numberType = NumberType.INTEGER;
+                wind();
+                if (!isxdigit(ch)) {
+                    cut();
+                    tokenType = TokenType.LEX_TOKEN_ERROR;
+                    return (tokenType);
+                }
+                while (isxdigit(ch)) {
+                    wind();
+                }
+            } else {
+                // get octal or real
+                numberType = NumberType.INTEGER;
+                while (Character.isDigit(ch)) {
+                    wind();
+                    if (!isodigit(ch)) {
+                        // not an octal number
+                        numberType = NumberType.REAL;
+                    }
+                }
+                if (ch == '.' || Character.toLowerCase(ch) == 'e') {
+                    numberType = NumberType.REAL;
+                } else if (numberType == NumberType.REAL) {
+                    // non-octal digits, but not a real (no '.' or 'e')
+                    // so, illegal octal constant
+                    cut();
+                    tokenType = TokenType.LEX_TOKEN_ERROR;
+                    return (tokenType);
+                }
+            }
+        } else if (Character.isDigit(och)) {
+            // decimal or real; get digits
+            while (Character.isDigit(ch)) {
+                wind();
+            }
+            numberType = (ch == '.' || Character.toLowerCase(ch) == 'e') ? NumberType.REAL : NumberType.INTEGER;
+        }
+
+        if (och == '.' || ch == '.') {
+            // fraction part of real or selection operator
+            if (ch == '.') {
+                wind();
+            }
+            if (Character.isDigit(ch)) {
+                // real; get digits after decimal point
+                numberType = NumberType.REAL;
+                while (Character.isDigit(ch)) {
+                    wind();
+                }
+            } else {
+                if (numberType != NumberType.NONE) {
+                    // initially like a number, but no digit following the '.'
+                    cut();
+                    tokenType = TokenType.LEX_TOKEN_ERROR;
+                    return (tokenType);
+                }
+                // selection operator
+                cut();
+                tokenType = TokenType.LEX_SELECTION;
+                return (tokenType);
+            }
+        }
+
+        // if we are tokenizing a real, the (optional) exponent part is left
+        //   i.e., [eE][+-]?[0-9]+
+        if (numberType == NumberType.REAL && Character.toLowerCase(ch) == 'e') {
+            wind();
+            if (ch == '+' || ch == '-') {
+                wind();
+            }
+            if (!Character.isDigit(ch)) {
+                cut();
+                tokenType = TokenType.LEX_TOKEN_ERROR;
+                return (tokenType);
+            }
+            while (Character.isDigit(ch)) {
+                wind();
+            }
+        }
+
+        if (numberType == NumberType.INTEGER) {
+            cut();
+            integer = Long.parseLong(lexBuffer.toString());
+        } else if (numberType == NumberType.REAL) {
+            cut();
+            real = Double.parseDouble(lexBuffer.toString());
+        } else {
+            /* If we've reached this point, we have a serious programming
+             * error: tokenizeNumber should only be called if we are
+             * lexing a number or a selection, and we didn't find a number
+             * or a selection. This should really never happen, so we
+             * bomb if it does. It should be reported as a bug.
+             */
+            throw new IOException("Should not reach here");
+        }
+
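+        // an optional unit-factor suffix (B, K, M, G, T) is recorded alongside the value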
+        switch (Character.toUpperCase(ch)) {
+            case 'B':
+                f = NumberFactor.B_FACTOR;
+                wind();
+                break;
+            case 'K':
+                f = NumberFactor.K_FACTOR;
+                wind();
+                break;
+            case 'M':
+                f = NumberFactor.M_FACTOR;
+                wind();
+                break;
+            case 'G':
+                f = NumberFactor.G_FACTOR;
+                wind();
+                break;
+            case 'T':
+                f = NumberFactor.T_FACTOR;
+                wind();
+                break;
+            default:
+                f = NumberFactor.NO_FACTOR;
+        }
+
+        if (numberType == NumberType.INTEGER) {
+            yylval.setIntValue(integer, f);
+            yylval.setTokenType(TokenType.LEX_INTEGER_VALUE);
+            tokenType = TokenType.LEX_INTEGER_VALUE;
+        } else {
+            yylval.setRealValue(real, f);
+            yylval.setTokenType(TokenType.LEX_REAL_VALUE);
+            tokenType = TokenType.LEX_REAL_VALUE;
+        }
+
+        return (tokenType);
+    }
+
+    public static boolean isodigit(char ch) {
+        return ch >= '0' && ch <= '7';
+    }
+
+    // Tokenize alpha head: (character sequences beginning with an alphabetic character)
+    //   1.  Reserved character sequences:  true, false, error, undefined
+    //   2.  Identifier                  :  [a-zA-Z_][a-zA-Z0-9_]*
+    public TokenType tokenizeAlphaHead() throws IOException {
+        mark();
+        while (Character.isAlphabetic(ch)) {
+            wind();
+        }
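+        // a digit or underscore here means the token cannot be a reserved word,
+        // so finish scanning it as an identifier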
+        if (Character.isDigit(ch) || ch == '_') {
+            // The token is an identifier; consume the rest of the token
+            wind();
+            while (Character.isAlphabetic(ch) || Character.isDigit(ch) || ch == '_') {
+                wind();
+            }
+            cut();
+            tokenType = TokenType.LEX_IDENTIFIER;
+            yylval.setStringValue(lexBuffer);
+            return tokenType;
+        }
+
+        // check if the string is one of the reserved words; Case insensitive
+        cut();
+        if (isEqualIgnoreCase(TRUE_CHAR_ARRAY)) {
+            tokenType = TokenType.LEX_BOOLEAN_VALUE;
+            yylval.setBoolValue(true);
+        } else if (isEqualIgnoreCase(FALSE_CHAR_ARRAY)) {
+            tokenType = TokenType.LEX_BOOLEAN_VALUE;
+            yylval.setBoolValue(false);
+        } else if (isEqualIgnoreCase(UNDEFINED_CHAR_ARRAY)) {
+            tokenType = TokenType.LEX_UNDEFINED_VALUE;
+        } else if (isEqualIgnoreCase(ERROR_CHAR_ARRAY)) {
+            tokenType = TokenType.LEX_ERROR_VALUE;
+        } else if (isEqualIgnoreCase(IS_CHAR_ARRAY)) {
+            tokenType = TokenType.LEX_META_EQUAL;
+        } else if (isEqualIgnoreCase(ISNT_CHAR_ARRAY)) {
+            tokenType = TokenType.LEX_META_NOT_EQUAL;
+        } else {
+            // token is a character only identifier
+            tokenType = TokenType.LEX_IDENTIFIER;
+            yylval.setStringValue(lexBuffer);
+        }
+        return tokenType;
+    }
+
+    private boolean isEqualIgnoreCase(char[] compareTo) {
+        return lexBuffer.isEqualsIgnoreCaseLower(compareTo);
+    }
+
+    // tokenizeString:  Scans strings of the form " ... " or '...'
+    // based on whether the argument passed was '\"' or '\''
+    public TokenType tokenizeString(char delim) throws IOException {
+        boolean stringComplete = false;
+
+        // need to mark() after the quote
+        wind();
+        mark();
+
+        while (!stringComplete) {
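+            // oddBackWhacks is true while the current run of backslashes has odd
+            // length, i.e. the next delimiter character is escaped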
+            boolean oddBackWhacks = false;
+            char oldCh = 0;
+            // consume the string literal; read up to the closing delimiter, ignoring escaped delimiters
+            while ((ch > 0) && (ch != EOF) && (ch != delim || (ch == delim && oldCh == '\\' && oddBackWhacks))) {
+                if (!oddBackWhacks && ch == '\\') {
+                    oddBackWhacks = true;
+                } else {
+                    oddBackWhacks = false;
+                }
+                oldCh = ch;
+                wind();
+            }
+
+            if (ch == delim) {
+                char tempch = ' ';
+                // read past the whitespace characters
+                while (Character.isWhitespace(tempch)) {
+                    tempch = lexSource.readCharacter();
+                }
+                if (tempch != delim) { // a new token exists after the string
+                    if (tempch != EOF) {
+                        lexSource.unreadCharacter();
+                    }
+                    stringComplete = true;
+                } else { // the adjacent string is to be concatenated to the existing string
+                    lexBuffer.erase(lexBuffer.getLength()); // erase the trailing delimiter
+                    wind();
+                }
+            } else {
+                // loop quit due to ch == 0 or ch == EOF
+                tokenType = TokenType.LEX_TOKEN_ERROR;
+                return tokenType;
+            }
+        }
+        cut();
+        wind(); // skip over the close quote
+        boolean validStr = Util.convertEscapes(lexBuffer); // whether the string remains valid after converting escapes
+        yylval.setStringValue(lexBuffer);
+        if (validStr) {
+            if (delim == '\"') {
+                tokenType = TokenType.LEX_STRING_VALUE;
+            } else {
+                tokenType = TokenType.LEX_IDENTIFIER;
+            }
+        } else {
+            tokenType = TokenType.LEX_TOKEN_ERROR; // string contains a '\0' character in between
+        }
+
+        return tokenType;
+    }
+
+    // tokenizePunctOperator:  Tokenize punctuation and operators
+    public TokenType tokenizePunctOperator() throws IOException {
+        // save character; may need to lookahead
+        char oldch = ch;
+        char extra_lookahead;
+
+        mark();
+        wind();
+        switch (oldch) {
+            // these cases don't need lookaheads
+            case '.':
+                tokenType = TokenType.LEX_SELECTION;
+                break;
+
+            case '*':
+                tokenType = TokenType.LEX_MULTIPLY;
+                break;
+
+            case '/':
+                tokenType = TokenType.LEX_DIVIDE;
+                break;
+
+            case '%':
+                tokenType = TokenType.LEX_MODULUS;
+                break;
+
+            case '+':
+                tokenType = TokenType.LEX_PLUS;
+                break;
+
+            case '-':
+                tokenType = TokenType.LEX_MINUS;
+                break;
+
+            case '~':
+                tokenType = TokenType.LEX_BITWISE_NOT;
+                break;
+
+            case '^':
+                tokenType = TokenType.LEX_BITWISE_XOR;
+                break;
+
+            case '?':
+                tokenType = TokenType.LEX_QMARK;
+                break;
+
+            case ':':
+                tokenType = TokenType.LEX_COLON;
+                break;
+
+            case ';':
+                tokenType = TokenType.LEX_SEMICOLON;
+                break;
+
+            case ',':
+                tokenType = TokenType.LEX_COMMA;
+                break;
+
+            case '[':
+                tokenType = TokenType.LEX_OPEN_BOX;
+                break;
+
+            case ']':
+                tokenType = TokenType.LEX_CLOSE_BOX;
+                break;
+
+            case '(':
+                tokenType = TokenType.LEX_OPEN_PAREN;
+                break;
+
+            case ')':
+                tokenType = TokenType.LEX_CLOSE_PAREN;
+                break;
+
+            case '{':
+                tokenType = TokenType.LEX_OPEN_BRACE;
+                break;
+
+            case '}':
+                tokenType = TokenType.LEX_CLOSE_BRACE;
+                break;
+
+            // the following cases need lookaheads
+
+            case '&':
+                tokenType = TokenType.LEX_BITWISE_AND;
+                if (ch == '&') {
+                    tokenType = TokenType.LEX_LOGICAL_AND;
+                    wind();
+                }
+                break;
+
+            case '|':
+                tokenType = TokenType.LEX_BITWISE_OR;
+                if (ch == '|') {
+                    tokenType = TokenType.LEX_LOGICAL_OR;
+                    wind();
+                }
+                break;
+
+            case '<':
+                tokenType = TokenType.LEX_LESS_THAN;
+                switch (ch) {
+                    case '=':
+                        tokenType = TokenType.LEX_LESS_OR_EQUAL;
+                        wind();
+                        break;
+
+                    case '<':
+                        tokenType = TokenType.LEX_LEFT_SHIFT;
+                        wind();
+                        break;
+
+                    default:
+                        // just the '<' --- no need to do anything
+                        break;
+                }
+                break;
+
+            case '>':
+                tokenType = TokenType.LEX_GREATER_THAN;
+                switch (ch) {
+                    case '=':
+                        tokenType = TokenType.LEX_GREATER_OR_EQUAL;
+                        wind();
+                        break;
+
+                    case '>':
+                        tokenType = TokenType.LEX_RIGHT_SHIFT;
+                        wind();
+                        if (ch == '>') {
+                            tokenType = TokenType.LEX_URIGHT_SHIFT;
+                            wind();
+                        }
+                        break;
+
+                    default:
+                        // just the '>' --- no need to do anything
+                        break;
+                }
+                break;
+
+            case '=':
+                tokenType = TokenType.LEX_BOUND_TO;
+                switch (ch) {
+                    case '=':
+                        tokenType = TokenType.LEX_EQUAL;
+                        wind();
+                        break;
+
+                    case '?':
+                        tokenType = TokenType.LEX_META_EQUAL;
+                        wind();
+
+                        // ensure the trailing '=' of the '=?=' combination
+                        if (ch != '=') {
+                            tokenType = TokenType.LEX_TOKEN_ERROR;
+                            return tokenType;
+                        }
+
+                        wind();
+                        break;
+
+                    case '!':
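+                        // possibly the '=!=' (meta-not-equal) operator; peek one character ahead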
+                        extra_lookahead = lexSource.readCharacter();
+                        lexSource.unreadCharacter();
+                        if (extra_lookahead == '=') {
+                            tokenType = TokenType.LEX_META_NOT_EQUAL;
+                            wind();
+                            wind();
+                        }
+                        break;
+
+                    default:
+                        // just the '=' --- no need to do anything
+                        break;
+                }
+                break;
+
+            case '!':
+                tokenType = TokenType.LEX_LOGICAL_NOT;
+                switch (ch) {
+                    case '=':
+                        tokenType = TokenType.LEX_NOT_EQUAL;
+                        wind();
+                        break;
+
+                    default:
+                        // just the '!' --- no need to do anything
+                        break;
+                }
+                break;
+
+            default:
+                tokenType = TokenType.LEX_TOKEN_ERROR;
+                return tokenType;
+        }
+
+        // cut the token and return
+        cut();
+        return tokenType;
+    }
+
+    // strLexToken:  Return string representation of token type
+    public static String strLexToken(TokenType tokenType) {
+        switch (tokenType) {
+            case LEX_END_OF_INPUT:
+                return "LEX_END_OF_INPUT";
+            case LEX_TOKEN_ERROR:
+                return "LEX_TOKEN_ERROR";
+            case LEX_TOKEN_TOO_LONG:
+                return "LEX_TOKEN_TOO_LONG";
+
+            case LEX_INTEGER_VALUE:
+                return "LEX_INTEGER_VALUE";
+            case LEX_REAL_VALUE:
+                return "LEX_REAL_VALUE";
+            case LEX_BOOLEAN_VALUE:
+                return "LEX_BOOLEAN_VALUE";
+            case LEX_STRING_VALUE:
+                return "LEX_STRING_VALUE";
+            case LEX_UNDEFINED_VALUE:
+                return "LEX_UNDEFINED_VALUE";
+            case LEX_ERROR_VALUE:
+                return "LEX_ERROR_VALUE";
+
+            case LEX_IDENTIFIER:
+                return "LEX_IDENTIFIER";
+            case LEX_SELECTION:
+                return "LEX_SELECTION";
+
+            case LEX_MULTIPLY:
+                return "LEX_MULTIPLY";
+            case LEX_DIVIDE:
+                return "LEX_DIVIDE";
+            case LEX_MODULUS:
+                return "LEX_MODULUS";
+            case LEX_PLUS:
+                return "LEX_PLUS";
+            case LEX_MINUS:
+                return "LEX_MINUS";
+
+            case LEX_BITWISE_AND:
+                return "LEX_BITWISE_AND";
+            case LEX_BITWISE_OR:
+                return "LEX_BITWISE_OR";
+            case LEX_BITWISE_NOT:
+                return "LEX_BITWISE_NOT";
+            case LEX_BITWISE_XOR:
+                return "LEX_BITWISE_XOR";
+
+            case LEX_LEFT_SHIFT:
+                return "LEX_LEFT_SHIFT";
+            case LEX_RIGHT_SHIFT:
+                return "LEX_RIGHT_SHIFT";
+            case LEX_URIGHT_SHIFT:
+                return "LEX_URIGHT_SHIFT";
+
+            case LEX_LOGICAL_AND:
+                return "LEX_LOGICAL_AND";
+            case LEX_LOGICAL_OR:
+                return "LEX_LOGICAL_OR";
+            case LEX_LOGICAL_NOT:
+                return "LEX_LOGICAL_NOT";
+
+            case LEX_LESS_THAN:
+                return "LEX_LESS_THAN";
+            case LEX_LESS_OR_EQUAL:
+                return "LEX_LESS_OR_EQUAL";
+            case LEX_GREATER_THAN:
+                return "LEX_GREATER_THAN";
+            case LEX_GREATER_OR_EQUAL:
+                return "LEX_GREATER_OR_EQUAL";
+            case LEX_EQUAL:
+                return "LEX_EQUAL";
+            case LEX_NOT_EQUAL:
+                return "LEX_NOT_EQUAL";
+            case LEX_META_EQUAL:
+                return "LEX_META_EQUAL";
+            case LEX_META_NOT_EQUAL:
+                return "LEX_META_NOT_EQUAL";
+
+            case LEX_BOUND_TO:
+                return "LEX_BOUND_TO";
+
+            case LEX_QMARK:
+                return "LEX_QMARK";
+            case LEX_COLON:
+                return "LEX_COLON";
+            case LEX_SEMICOLON:
+                return "LEX_SEMICOLON";
+            case LEX_COMMA:
+                return "LEX_COMMA";
+            case LEX_OPEN_BOX:
+                return "LEX_OPEN_BOX";
+            case LEX_CLOSE_BOX:
+                return "LEX_CLOSE_BOX";
+            case LEX_OPEN_PAREN:
+                return "LEX_OPEN_PAREN";
+            case LEX_CLOSE_PAREN:
+                return "LEX_CLOSE_PAREN";
+            case LEX_OPEN_BRACE:
+                return "LEX_OPEN_BRACE";
+            case LEX_CLOSE_BRACE:
+                return "LEX_CLOSE_BRACE";
+            case LEX_BACKSLASH:
+                return "LEX_BACKSLASH";
+            case LEX_ABSOLUTE_TIME_VALUE:
+                return "LEX_ABSOLUTE_TIME_VALUE";
+            case LEX_RELATIVE_TIME_VALUE:
+                return "LEX_RELATIVE_TIME_VALUE";
+
+            default:
+                return "** Unknown **";
+        }
+    }
+
+    public LexerSource getLexSource() {
+        return lexSource;
+    }
+}
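
Aside on the operator cases above: they resolve multi-character tokens by peeking at the character that follows the one already consumed, so '&' only becomes LEX_LOGICAL_AND when the lookahead is another '&', '=' can grow into '==', '=?=' or '=!=', and '>' can grow into '>=', '>>' or '>>>'. A minimal, self-contained sketch of that maximal-munch lookahead (hypothetical class and method names, not part of this patch and independent of the Lexer/LexerSource classes in this change):

    public final class LookaheadSketch {

        // Decide which token starts at in[pos], assuming in[pos] == '>'.
        public static String scanGreaterFamily(char[] in, int pos) {
            int i = pos + 1;
            if (i < in.length && in[i] == '=') {
                return "LEX_GREATER_OR_EQUAL";   // ">="
            }
            if (i < in.length && in[i] == '>') {
                i++;
                if (i < in.length && in[i] == '>') {
                    return "LEX_URIGHT_SHIFT";   // ">>>"
                }
                return "LEX_RIGHT_SHIFT";        // ">>"
            }
            return "LEX_GREATER_THAN";           // just ">"
        }

        public static void main(String[] args) {
            System.out.println(scanGreaterFamily(">=1".toCharArray(), 0)); // LEX_GREATER_OR_EQUAL
            System.out.println(scanGreaterFamily(">>>".toCharArray(), 0)); // LEX_URIGHT_SHIFT
            System.out.println(scanGreaterFamily("> 2".toCharArray(), 0)); // LEX_GREATER_THAN
        }
    }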

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/LexerSource.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/LexerSource.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/LexerSource.java
new file mode 100644
index 0000000..0df5401
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/LexerSource.java
@@ -0,0 +1,62 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.io.IOException;
+
+public abstract class LexerSource {
+    /*
+     * LexerSource is a class that provides an abstract interface to the
+     * lexer. The lexer reads tokens and gives them to the parser.  The
+     * lexer reads single characters from a LexerSource. Because we want
+     * to read characters from different types of sources (files, strings,
+     * etc.) without burdening the lexer, the lexer uses this LexerSource
+     * abstract class. There are several implementations of the
+     * LexerSource that provide access to specific types of sources.
+     *
+     */
+    protected char previousCharacter;
+
+    public int getPosition() {
+        return -1;
+    }
+
+    // Reads a single character from the source
+    public abstract char readCharacter() throws IOException;
+
+    // Returns the last character read (from readCharacter()) from the
+    // source
+    public char readPreviousCharacter() {
+        return previousCharacter;
+    }
+
+    // Puts back a character so that when readCharacter() is called
+    // again, it returns the character that was previously
+    // read. Although this interface appears to require the ability to
+    // put back an arbitrary number of characters, in practice we only
+    // ever put back a single character.
+    public abstract void unreadCharacter() throws IOException;
+
+    public abstract boolean atEnd() throws IOException;
+
+    public abstract char[] getBuffer();
+
+    public void setNewSource(char[] recordCharBuffer) {
+    }
+}
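
The abstract class above only requires a concrete source to implement readCharacter(), unreadCharacter(), atEnd() and getBuffer(), recording each character it hands out in previousCharacter. A minimal char[]-backed sketch of such a source (hypothetical class name, not part of this patch; the concrete sources added elsewhere in this change may differ), assuming it lives in the same package as LexerSource:

    import java.io.IOException;

    public class CharArraySourceSketch extends LexerSource {
        private char[] buffer;
        private int position = 0;

        public CharArraySourceSketch(char[] buffer) {
            this.buffer = buffer;
        }

        @Override
        public int getPosition() {
            return position;
        }

        @Override
        public char readCharacter() throws IOException {
            if (position >= buffer.length) {
                // end-of-input sentinel; a real source may signal this differently
                previousCharacter = (char) -1;
                return previousCharacter;
            }
            previousCharacter = buffer[position++];
            return previousCharacter;
        }

        @Override
        public void unreadCharacter() throws IOException {
            if (position > 0) {
                position--; // only one character of pushback is ever needed in practice
            }
        }

        @Override
        public boolean atEnd() throws IOException {
            return position >= buffer.length;
        }

        @Override
        public char[] getBuffer() {
            return buffer;
        }

        @Override
        public void setNewSource(char[] recordCharBuffer) {
            this.buffer = recordCharBuffer;
            this.position = 0;
        }
    }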

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Literal.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Literal.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Literal.java
new file mode 100644
index 0000000..5254a37
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Literal.java
@@ -0,0 +1,521 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.util.Calendar;
+import java.util.TimeZone;
+
+import org.apache.asterix.external.classad.Value.NumberFactor;
+import org.apache.asterix.external.classad.Value.ValueType;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class Literal extends ExprTree {
+    /**
+     * Represents the literals of the ClassAd language, such as integers,
+     * reals, booleans, strings, absolute and relative times, undefined, and error.
+     */
+    // literal specific information
+    private Value value;
+    private NumberFactor factor;
+
+    public Literal() {
+        factor = Value.NumberFactor.NO_FACTOR;
+        value = new Value();
+    }
+
+    @Override
+    public String toString() {
+        switch (value.getValueType()) {
+            case ABSOLUTE_TIME_VALUE:
+                return "datetime(" + value + ")";
+            case BOOLEAN_VALUE:
+                return String.valueOf(value.getBoolVal());
+            case CLASSAD_VALUE:
+            case LIST_VALUE:
+            case SLIST_VALUE:
+            case INTEGER_VALUE:
+            case NULL_VALUE:
+            case REAL_VALUE:
+                return value.toString();
+            case ERROR_VALUE:
+                return "\"error\"";
+            case RELATIVE_TIME_VALUE:
+                return "duration(" + value + ")";
+            case UNDEFINED_VALUE:
+                return "\"undefined\"";
+            case STRING_VALUE:
+                return "\"" + value.toString() + "\"";
+            default:
+                return null;
+        }
+    }
+
+    public Literal(Literal literal) throws HyracksDataException {
+        copyFrom(literal);
+    }
+
+    @Override
+    public ExprTree copy() throws HyracksDataException {
+        Literal newTree = new Literal();
+        newTree.copyFrom(this);
+        return newTree;
+    }
+
+    public void copyFrom(Literal literal) throws HyracksDataException {
+        super.copyFrom(literal);
+        value.copyFrom(literal.value);
+        factor = literal.factor;
+        return;
+    }
+
+    public static Literal createReal(AMutableCharArrayString aString) throws HyracksDataException {
+        Value val = new Value();
+        double real;
+        real = Double.parseDouble(aString.toString());
+        val.setRealValue(real);
+        return createLiteral(val);
+    }
+
+    public static Literal createReal(String aString) throws HyracksDataException {
+        Value val = new Value();
+        double real;
+        real = Double.parseDouble(aString);
+        val.setRealValue(real);
+        return createLiteral(val);
+    }
+
+    public static Literal createAbsTime(ClassAdTime tim) throws HyracksDataException {
+        Value val = new Value();
+        if (tim == null) { // => current time/offset
+            tim = new ClassAdTime();
+        }
+        val.setAbsoluteTimeValue(tim);
+        return (createLiteral(val));
+    }
+
+    /* Creates an absolute time literal, from the string timestr,
+     *parsing it as the regular expression:
+     D* dddd [D* dd [D* dd [D* dd [D* dd [D* dd D*]]]]] [-dd:dd | +dd:dd | z | Z]
+     D => non-digit, d=> digit
+     Ex - 2003-01-25T09:00:00-06:00
+    */
+    public static Literal createAbsTime(AMutableCharArrayString timeStr) throws HyracksDataException {
+        Value val = new Value();
+        boolean offset = false; // to check if the argument contains a timezone offset parameter
+
+        AMutableInt32 tzhr = new AMutableInt32(0); // corresponds to 1st "dd" in -|+dd:dd
+        AMutableInt32 tzmin = new AMutableInt32(0); // corresponds to 2nd "dd" in -|+dd:dd
+
+        int len = timeStr.getLength();
+        AMutableInt32 index = new AMutableInt32(len - 1);
+        prevNonSpaceChar(timeStr, index);
+        AMutableInt32 i = new AMutableInt32(index.getIntegerValue());
+        if ((timeStr.charAt(i.getIntegerValue()) == 'z') || (timeStr.charAt(i.getIntegerValue()) == 'Z')) { // z|Z corresponds to a timezone offset of 0
+            offset = true;
+            timeStr.erase(i.getIntegerValue()); // remove the offset section from the string
+        } else if (timeStr.charAt(len - 5) == '+' || timeStr.charAt(len - 5) == '-') {
+            offset = extractTimeZone(timeStr, tzhr, tzmin);
+        } else if ((timeStr.charAt(len - 6) == '+' || timeStr.charAt(len - 6) == '-')
+                && timeStr.charAt(len - 3) == ':') {
+            timeStr.erase(len - 3, 1);
+            offset = extractTimeZone(timeStr, tzhr, tzmin);
+        }
+
+        i.setValue(0);
+        len = timeStr.getLength();
+        nextDigitChar(timeStr, i);
+        if (i.getIntegerValue() > len - 4) { // string has to contain dddd (year)
+            val.setErrorValue();
+            return (createLiteral(val));
+        }
+        int tm_year, tm_mon = 0, tm_mday = 0, tm_hour = 0, tm_min = 0, tm_sec = 0;
+        tm_year = Integer.parseInt((timeStr.substr(i.getIntegerValue(), 4)));// - 1900;
+        i.setValue(i.getIntegerValue() + 4);
+        nextDigitChar(timeStr, i);
+        if (i.getIntegerValue() <= len - 2) {
+            tm_mon = Integer.parseInt(timeStr.substr(i.getIntegerValue(), 2)) - 1;
+            i.setValue(i.getIntegerValue() + 2);
+        }
+        nextDigitChar(timeStr, i);
+
+        if (i.getIntegerValue() <= len - 2) {
+            tm_mday = Integer.parseInt(timeStr.substr(i.getIntegerValue(), 2));
+            i.setValue(i.getIntegerValue() + 2);
+        }
+        nextDigitChar(timeStr, i);
+
+        if (i.getIntegerValue() <= len - 2) {
+            tm_hour = Integer.parseInt(timeStr.substr(i.getIntegerValue(), 2));
+            i.setValue(i.getIntegerValue() + 2);
+        }
+        nextDigitChar(timeStr, i);
+
+        if (i.getIntegerValue() <= len - 2) {
+            tm_min = Integer.parseInt(timeStr.substr(i.getIntegerValue(), 2));
+            i.setValue(i.getIntegerValue() + 2);
+        }
+        nextDigitChar(timeStr, i);
+
+        if (i.getIntegerValue() <= len - 2) {
+            tm_sec = Integer.parseInt(timeStr.substr(i.getIntegerValue(), 2));
+            i.setValue(i.getIntegerValue() + 2);
+        }
+        nextDigitChar(timeStr, i);
+
+        if ((i.getIntegerValue() <= len - 1) && (Character.isDigit(timeStr.charAt(i.getIntegerValue())))) { // there should be no more digit characters once the required
+            val.setErrorValue(); // parameters are parsed
+            return (createLiteral(val));
+        }
+        Calendar cal = Calendar.getInstance(TimeZone.getTimeZone("GMT"));
+        cal.clear();
+        cal.set(tm_year, tm_mon, tm_mday, tm_hour, tm_min, tm_sec);
+        ClassAdTime time = new ClassAdTime();
+        time.setValue(cal.getTimeInMillis());
+        if (offset) {
+            time.setTimeZone((tzhr.getIntegerValue() * 3600000) + (tzmin.getIntegerValue() * 60000));
+        } else {
+            // if offset is not specified, the offset of the current locality is taken
+            time.setDefaultTimeZone();
+        }
+        val.setAbsoluteTimeValue(time);
+        return (createLiteral(val));
+    }
+
+    public Literal createRelTime(ClassAdTime t1, ClassAdTime t2) throws HyracksDataException {
+        Value val = new Value();
+        val.setRelativeTimeValue(t1.subtract(t2));
+        return (createLiteral(val));
+    }
+
+    Literal createRelTime(ClassAdTime secs) throws HyracksDataException {
+        Value val = new Value();
+        val.setRelativeTimeValue(secs);
+        return (createLiteral(val));
+    }
+
+    /* Creates a relative time literal, from the string timestr,
+     *parsing it as [[[days+]hh:]mm:]ss
+     * Ex - 1+00:02:00
+     */
+    public static Literal createRelTime(AMutableCharArrayString timeStr) throws HyracksDataException {
+        Value val = new Value();
+        ClassAdTime rsecs = new ClassAdTime();
+
+        int len = timeStr.getLength();
+        double secs = 0;
+        int mins = 0;
+        int hrs = 0;
+        int days = 0;
+        boolean negative = false;
+
+        AMutableInt32 i = new AMutableInt32(len - 1);
+        prevNonSpaceChar(timeStr, i);
+        // checking for 'sec' parameter & collecting it if present (ss.sss)
+        if ((i.getIntegerValue() >= 0)
+                && ((timeStr.charAt(i.getIntegerValue()) == 's') || (timeStr.charAt(i.getIntegerValue()) == 'S')
+                        || (Character.isDigit(timeStr.charAt(i.getIntegerValue()))))) {
+            if ((timeStr.charAt(i.getIntegerValue()) == 's') || (timeStr.charAt(i.getIntegerValue()) == 'S')) {
+                i.setValue(i.getIntegerValue() - 1);
+            }
+            prevNonSpaceChar(timeStr, i);
+            AMutableCharArrayString revSecStr = new AMutableCharArrayString();
+            while ((i.getIntegerValue() >= 0) && (Character.isDigit(timeStr.charAt(i.getIntegerValue())))) {
+                revSecStr.appendChar(timeStr.charAt(i.getIntegerValue()));
+                i.setValue(i.getIntegerValue() - 1);
+            }
+            if ((i.getIntegerValue() >= 0) && (timeStr.charAt(i.getIntegerValue()) == '.')) {
+                revSecStr.appendChar(timeStr.charAt(i.getIntegerValue()));
+                i.setValue(i.getIntegerValue() - 1);
+                while ((i.getIntegerValue() >= 0) && (Character.isDigit(timeStr.charAt(i.getIntegerValue())))) {
+                    revSecStr.appendChar(timeStr.charAt(i.getIntegerValue()));
+                    i.setValue(i.getIntegerValue() - 1);
+                }
+            }
+            secs = revDouble(revSecStr);
+        }
+
+        prevNonSpaceChar(timeStr, i);
+        // checking for 'min' parameter
+        if ((i.getIntegerValue() >= 0) && ((timeStr.charAt(i.getIntegerValue()) == 'm')
+                || (timeStr.charAt(i.getIntegerValue()) == 'M') || (timeStr.charAt(i.getIntegerValue()) == ':'))) {
+            i.setValue(i.getIntegerValue() - 1);
+            AMutableCharArrayString revMinStr = new AMutableCharArrayString();
+            prevNonSpaceChar(timeStr, i);
+            while ((i.getIntegerValue() >= 0) && (Character.isDigit(timeStr.charAt(i.getIntegerValue())))) {
+                revMinStr.appendChar(timeStr.charAt(i.getIntegerValue()));
+                i.setValue(i.getIntegerValue() - 1);
+            }
+            mins = revInt(revMinStr);
+        }
+
+        prevNonSpaceChar(timeStr, i);
+        // checking for 'hrs' parameter
+        if ((i.getIntegerValue() >= 0) && ((timeStr.charAt(i.getIntegerValue()) == 'h')
+                || (timeStr.charAt(i.getIntegerValue()) == 'H') || (timeStr.charAt(i.getIntegerValue()) == ':'))) {
+            i.setValue(i.getIntegerValue() - 1);
+            AMutableCharArrayString revHrStr = new AMutableCharArrayString();
+            prevNonSpaceChar(timeStr, i);
+            while ((i.getIntegerValue() >= 0) && (Character.isDigit(timeStr.charAt(i.getIntegerValue())))) {
+                revHrStr.appendChar(timeStr.charAt(i.getIntegerValue()));
+                i.setValue(i.getIntegerValue() - 1);
+            }
+            hrs = revInt(revHrStr);
+        }
+
+        prevNonSpaceChar(timeStr, i);
+        // checking for 'days' parameter
+        if ((i.getIntegerValue() >= 0) && ((timeStr.charAt(i.getIntegerValue()) == 'd')
+                || (timeStr.charAt(i.getIntegerValue()) == 'D') || (timeStr.charAt(i.getIntegerValue()) == '+'))) {
+            i.setValue(i.getIntegerValue() - 1);
+            AMutableCharArrayString revDayStr = new AMutableCharArrayString();
+            prevNonSpaceChar(timeStr, i);
+            while ((i.getIntegerValue() >= 0) && (Character.isDigit(timeStr.charAt(i.getIntegerValue())))) {
+                revDayStr.appendChar(timeStr.charAt(i.getIntegerValue()));
+                i.setValue(i.getIntegerValue() - 1);
+            }
+            days = revInt(revDayStr);
+        }
+
+        prevNonSpaceChar(timeStr, i);
+        // checking for '-' operator
+        if ((i.getIntegerValue() >= 0) && (timeStr.charAt(i.getIntegerValue()) == '-')) {
+            negative = true;
+            i.setValue(i.getIntegerValue() - 1);
+        }
+
+        prevNonSpaceChar(timeStr, i);
+
+        if ((i.getIntegerValue() >= 0) && (!(Character.isWhitespace(timeStr.charAt(i.getIntegerValue()))))) { // should not contain any non-space char beyond -,d,h,m,s
+            val.setErrorValue();
+            return (createLiteral(val));
+        }
+
+        rsecs.setRelativeTime(
+                (long) ((negative ? -1 : +1) * (days * 86400000 + hrs * 3600000 + mins * 60000 + secs * 1000.0)));
+        val.setRelativeTimeValue(rsecs);
+
+        return (createLiteral(val));
+    }
+
+    /* Function which iterates forward through the string Str from the location 'index',
+     * setting 'index' to the position of the next digit character
+     */
+    public static void nextDigitChar(AMutableCharArrayString Str, AMutableInt32 index) {
+        int len = Str.getLength();
+        int i = index.getIntegerValue();
+        while ((i < len) && (!Character.isDigit(Str.charAt(i)))) {
+            i++;
+        }
+        index.setValue(i);
+    }
+
+    /* Function which iterates backwards through the string Str from the location 'index',
+     * setting 'index' to the position of the first occurring non-space character
+     */
+    public static void prevNonSpaceChar(AMutableCharArrayString Str, AMutableInt32 index) {
+        int i = index.getIntegerValue();
+        while ((i >= 0) && (Character.isWhitespace(Str.charAt(i)))) {
+            i--;
+        }
+        index.setValue(i);
+    }
+
+    /* Function which takes a number in string format, and reverses the
+     * order of the digits & returns the corresponding number as an
+     * integer.
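+     * Ex - revInt("321") returns 123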
+     */
+    public static int revInt(AMutableCharArrayString revNumStr) {
+        AMutableCharArrayString numStr = new AMutableCharArrayString(revNumStr.getLength());
+        for (int i = revNumStr.getLength() - 1; i >= 0; i--) {
+            numStr.appendChar(revNumStr.charAt(i));
+        }
+        return Integer.parseInt(numStr.toString());
+    }
+
+    /* Function which takes a number in string format, and reverses the
+     * order of the digits & returns the corresponding number as a double.
+     */
+    public static double revDouble(AMutableCharArrayString revNumStr) {
+        AMutableCharArrayString numStr = new AMutableCharArrayString(revNumStr.getLength());
+        for (int i = revNumStr.getLength() - 1; i >= 0; i--) {
+            numStr.appendChar(revNumStr.charAt(i));
+        }
+        return Double.parseDouble(numStr.toString());
+    }
+
+    /* function which returns the timezone offset corresponding to the argument epochsecs,
+     *  which is the number of seconds since the epoch
+     */
+    public static int findOffset(ClassAdTime epochsecs) {
+        return Util.timezoneOffset(epochsecs);
+    }
+
+    public static Literal createLiteral(Value val, NumberFactor f) throws HyracksDataException {
+        if (val.getType() == ValueType.CLASSAD_VALUE || val.getType() == ValueType.LIST_VALUE
+                || val.getType() == ValueType.SLIST_VALUE) {
+            throw new HyracksDataException("list and classad values are not literals");
+        }
+        Literal lit = new Literal();
+        lit.value.copyFrom(val);
+        if (!val.isIntegerValue() && !val.isRealValue()) {
+            f = NumberFactor.NO_FACTOR;
+        }
+        lit.factor = f;
+        return lit;
+    }
+
+    public static void createLiteral(Literal lit, Value val, NumberFactor f) throws HyracksDataException {
+        if (val.getType() == ValueType.CLASSAD_VALUE || val.getType() == ValueType.LIST_VALUE
+                || val.getType() == ValueType.SLIST_VALUE) {
+            throw new HyracksDataException("list and classad values are not literals");
+        }
+        lit.value.copyFrom(val);
+        if (!val.isIntegerValue() && !val.isRealValue()) {
+            f = NumberFactor.NO_FACTOR;
+        }
+        lit.factor = f;
+    }
+
+    public static Literal createLiteral(Value val) throws HyracksDataException {
+        return createLiteral(val, NumberFactor.NO_FACTOR);
+    }
+
+    public void GetValue(Value val) throws HyracksDataException {
+        AMutableInt64 i = new AMutableInt64(0);
+        AMutableDouble r = new AMutableDouble(0);
+        val.copyFrom(value);
+
+        // if integer or real, multiply by the factor
+        if (val.isIntegerValue(i)) {
+            if (factor != NumberFactor.NO_FACTOR) {
+                val.setRealValue((i.getLongValue()) * Value.ScaleFactor[factor.ordinal()]);
+            }
+        } else if (val.isRealValue(r)) {
+            if (factor != NumberFactor.NO_FACTOR) {
+                val.setRealValue(r.getDoubleValue() * Value.ScaleFactor[factor.ordinal()]);
+            }
+        }
+    }
+
+    public void getComponents(Value val, AMutableNumberFactor factor) throws HyracksDataException {
+        val.copyFrom(value);
+        factor.setFactor(this.factor);
+    }
+
+    @Override
+    public boolean sameAs(ExprTree tree) {
+        boolean is_same;
+        ExprTree pSelfTree = tree.self();
+
+        if (this == pSelfTree) {
+            is_same = true;
+        } else if (pSelfTree.getKind() != NodeKind.LITERAL_NODE) {
+            is_same = false;
+        } else {
+            Literal other_literal = (Literal) pSelfTree;
+            is_same = (factor == other_literal.factor && value.sameAs(other_literal.value));
+        }
+        return is_same;
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o instanceof Literal) {
+            Literal literal = (Literal) o;
+            return sameAs(literal);
+        }
+        return false;
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState eval, Value val) throws HyracksDataException {
+        AMutableInt64 i = new AMutableInt64(0);
+        AMutableDouble r = new AMutableDouble(0);
+
+        val.copyFrom(value);
+
+        // if integer or real, multiply by the factor
+        if (val.isIntegerValue(i)) {
+            if (factor != NumberFactor.NO_FACTOR) {
+                val.setRealValue((i.getLongValue()) * Value.ScaleFactor[factor.ordinal()]);
+            } else {
+                val.setIntegerValue(i.getLongValue());
+            }
+        } else if (val.isRealValue(r)) {
+            val.setRealValue(r.getDoubleValue() * Value.ScaleFactor[factor.ordinal()]);
+        }
+        return true;
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val, ExprTreeHolder tree) throws HyracksDataException {
+        privateEvaluate(state, val);
+        tree.setInnerTree(copy());
+        return (tree != null);
+    }
+
+    @Override
+    public boolean privateFlatten(EvalState state, Value val, ExprTreeHolder tree, AMutableInt32 i)
+            throws HyracksDataException {
+        tree.reset();
+        return privateEvaluate(state, val);
+    }
+
+    public static boolean extractTimeZone(AMutableCharArrayString timeStr, AMutableInt32 tzhr, AMutableInt32 tzmin) {
+        int len = timeStr.getLength();
+        int i = len - 1;
+        boolean offset = false;
+        String offStr = timeStr.toString().substring(i - 4, len);
+
+        if (((offStr.charAt(0) == '+') || (offStr.charAt(0) == '-')) && (Character.isDigit(offStr.charAt(1)))
+                && (Character.isDigit(offStr.charAt(2))) && (Character.isDigit(offStr.charAt(3)))
+                && (Character.isDigit(offStr.charAt(4)))) {
+            offset = true;
+            timeStr.erase(i - 4, 5);
+            if (offStr.charAt(0) == '+') {
+                tzhr.setValue(Integer.parseInt(offStr.substring(1, 3)));
+                tzmin.setValue(Integer.parseInt(offStr.substring(3, 5)));
+            } else {
+                tzhr.setValue((-1) * Integer.parseInt(offStr.substring(1, 3)));
+                tzmin.setValue((-1) * Integer.parseInt(offStr.substring(3, 5)));
+            }
+        }
+        return offset;
+    }
+
+    @Override
+    public NodeKind getKind() {
+        return NodeKind.LITERAL_NODE;
+    }
+
+    @Override
+    protected void privateSetParentScope(ClassAd scope) {
+    }
+
+    @Override
+    public void reset() {
+        value.clear();
+        factor = NumberFactor.NO_FACTOR;
+    }
+
+    public Value getValue() {
+        return value;
+    }
+}
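
The factory methods above accept the formats documented in the comments: createAbsTime parses absolute times such as "2003-01-25T09:00:00-06:00" (with an optional -dd:dd, +dd:dd, z or Z timezone suffix), and createRelTime parses relative times of the form [[[days+]hh:]mm:]ss, e.g. "1+00:02:00". A small usage sketch (not part of this patch; assumes the same package as Literal, and the chars(...) helper is hypothetical, existing only to build an AMutableCharArrayString from a plain String via the appendChar API shown above):

    import org.apache.hyracks.api.exceptions.HyracksDataException;

    public class LiteralUsageSketch {

        // hypothetical helper: copy a String into an AMutableCharArrayString
        static AMutableCharArrayString chars(String s) {
            AMutableCharArrayString out = new AMutableCharArrayString();
            for (int i = 0; i < s.length(); i++) {
                out.appendChar(s.charAt(i));
            }
            return out;
        }

        public static void main(String[] args) throws HyracksDataException {
            Literal abs = Literal.createAbsTime(chars("2003-01-25T09:00:00-06:00"));
            Literal rel = Literal.createRelTime(chars("1+00:02:00"));
            Literal real = Literal.createReal("123.456");

            Value v = new Value();
            real.GetValue(v);          // copies the value out, applying any NumberFactor
            System.out.println(abs);   // datetime(...)
            System.out.println(rel);   // duration(...)
        }
    }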


[05/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/resources/jobads.new
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/jobads.new b/asterix-external-data/src/test/resources/jobads.new
new file mode 100644
index 0000000..2ca4919
--- /dev/null
+++ b/asterix-external-data/src/test/resources/jobads.new
@@ -0,0 +1,12869 @@
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446112223; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.179100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2696692,ChtcWrapper159.out,AuditLog.159,simu_3_159.txt,harvest.log,159.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.195400000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/159"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134176; 
+        LastMatchTime = 1446112222; 
+        LastJobLeaseRenewal = 1446134176; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582557; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=159 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134177; 
+        QDate = 1446105741; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/159/process.log"; 
+        JobCurrentStartDate = 1446112222; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "159+159"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 21954; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.152"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134176; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446112222; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582557.0#1446105741"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.195400000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e352.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/159/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125604; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.152:39021>#1444772294#9281#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 159+159"; 
+        CumulativeSlotTime = 2.195400000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 21953; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446111648; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.235300000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_818403,ChtcWrapper211.out,AuditLog.211,simu_3_211.txt,harvest.log,211.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.252000000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.060300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134167; 
+        QDate = 1446105734; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134167; 
+        LastMatchTime = 1446111647; 
+        LastJobLeaseRenewal = 1446134167; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582533; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211/process.log"; 
+        JobCurrentStartDate = 1446111647; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=211 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "211+211"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 22520; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.61"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134167; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446111647; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582533.0#1446105734"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.252000000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e261.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/211/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 126608; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.61:49736>#1444759807#6759#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 211+211"; 
+        CumulativeSlotTime = 2.252000000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 22519; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446134109; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.400000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,CURLTIME_137795,ChtcWrapper11021.out,R2011b_INFO,AuditLog.11021,SLIBS2.tar.gz,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5124; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727270000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        RecentStatsLifetimeStarter = 48; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134165; 
+        QDate = 1446134012; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134165; 
+        LastMatchTime = 1446134107; 
+        LastJobLeaseRenewal = 1446134165; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49584018; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/11021/process.log"; 
+        JobCurrentStartDate = 1446134107; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=11021 --"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "11021+11021"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.39"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134165; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446134107; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49584018.0#1446134012"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 14; 
+        LastRemoteHost = "slot1@e239.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/11021/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5124; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.39:54850>#1445038698#5043#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 11021+11021"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 14; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1139127; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 56; 
+        ImageSize = 7500; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        WantGlidein = true; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/11021"
+    ]
+
+    [
+        BlockWrites = 4; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446108996; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.477600000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 100000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,ChtcWrapper407.out,AuditLog.407,CURLTIME_1861323,407.out,simu_3_407.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 123648; 
+        RemoteWallClockTime = 2.513300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastRejMatchReason = "PREEMPTION_REQUIREMENTS == False "; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 3976; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 30280; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/407"; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134128; 
+        LastMatchTime = 1446108995; 
+        LastJobLeaseRenewal = 1446134128; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582261; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=407 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134128; 
+        QDate = 1446105631; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/407/process.log"; 
+        JobCurrentStartDate = 1446108995; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "407+407"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 25133; 
+        AutoClusterId = 38210; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 16; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134128; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446108995; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582261.0#1446105631"; 
+        RemoteSysCpu = 2.770000000000000E+02; 
+        LastRejMatchTime = 1446108994; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.513300000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 906; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/407/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 76112; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1604#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 407+407"; 
+        CumulativeSlotTime = 2.513300000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 313; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 25132; 
+        ImageSize = 125000
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446121054; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.293400000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_37424,ChtcWrapper409.out,AuditLog.409,simu_3_409.txt,harvest.log,409.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.305100000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.787300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134104; 
+        LastMatchTime = 1446121053; 
+        LastJobLeaseRenewal = 1446134104; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49583239; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=409 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134104; 
+        QDate = 1446106003; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409/process.log"; 
+        JobCurrentStartDate = 1446121053; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "409+409"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 13051; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.242"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134104; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446121053; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583239.0#1446106003"; 
+        RemoteSysCpu = 9.300000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.305100000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e442.chtc.WISC.EDU"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/409/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 127216; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.242:38884>#1443991450#10456#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 409+409"; 
+        CumulativeSlotTime = 1.305100000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 13050; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1445943853; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.852360000000000E+05; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.843670000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.902470000000000E+05; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.076600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134099; 
+        QDate = 1445938922; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134099; 
+        LastMatchTime = 1445943852; 
+        LastJobLeaseRenewal = 1446134099; 
+        DAGManNodesLog = "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49573720; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally/Simulation_condor/model_3/180/process.log"; 
+        JobCurrentStartDate = 1445943852; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "180+180"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 190247; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.72"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49572657; 
+        EnteredCurrentStatus = 1446134099; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1445943852; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49573720.0#1445938922"; 
+        RemoteSysCpu = 1.835000000000000E+03; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.902470000000000E+05; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e272.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 123680; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.72:29075>#1444753997#6000#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 180+180"; 
+        CumulativeSlotTime = 1.902470000000000E+05; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 190245; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally/Simulation_condor/model_3/180"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446114726; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.908100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 75000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,232.out,ChtcWrapper232.out,AuditLog.232,CURLTIME_1864147,simu_3_232.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 118772; 
+        RemoteWallClockTime = 1.933800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.791100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 12; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 26436; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134062; 
+        QDate = 1446105779; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134062; 
+        LastMatchTime = 1446114724; 
+        LastJobLeaseRenewal = 1446134062; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582659; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232/process.log"; 
+        JobCurrentStartDate = 1446114724; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=232 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "232+232"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 19338; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134062; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446114724; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582659.0#1446105779"; 
+        RemoteSysCpu = 1.790000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.933800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 615; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/232/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 71268; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1612#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 232+232"; 
+        CumulativeSlotTime = 1.933800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 3; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216668; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 19336; 
+        ImageSize = 125000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.200000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "R2011b_INFO,CODEBLOWUP,AuditLog.10012,SLIBS2.tar.gz,ChtcWrapper10012.out,CURLTIME_2575055,chtcinnerwrapper"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5128; 
+        RemoteWallClockTime = 7.700000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727355000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/10012"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 67; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134040; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134040; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583905; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=10012 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134040; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/10012/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133888; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "10012+10012"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 77; 
+        AutoClusterId = 38267; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.69"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134040; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583905.0#1446133888"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 7.700000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 12; 
+        LastRemoteHost = "slot1_2@e189.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/10012/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5128; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.69:4177>#1444973293#3769#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 10012+10012"; 
+        CumulativeSlotTime = 7.700000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 12; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1211433; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 76; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115779; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.811800000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.847170000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3140097,ChtcWrapper3.out,AuditLog.3,simu_3_3.txt,harvest.log,3.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.824800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.789600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134026; 
+        QDate = 1446105835; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134026; 
+        LastMatchTime = 1446115778; 
+        LastJobLeaseRenewal = 1446134026; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582786; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3/process.log"; 
+        JobCurrentStartDate = 1446115778; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=3 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "3+3"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 18248; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.107"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582778; 
+        EnteredCurrentStatus = 1446134026; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446115778; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582786.0#1446105835"; 
+        RemoteSysCpu = 1.080000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.824800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e307.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.46/Simulation_condor/data/3/,/home/xguo23/model_3_1.46/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125940; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.107:63744>#1444685448#11070#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 3+3"; 
+        CumulativeSlotTime = 1.824800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 18247; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.46/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3"
+    ]
+
+    [
+        BlockWrites = 506; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.100000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,SLIBS2.tar.gz,R2011b_INFO,AuditLog.20111,CURLTIME_1051736,ChtcWrapper20111.out,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5056; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727274000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 164; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 164; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/20111"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 43; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 506; 
+        CompletionDate = 1446134021; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134021; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583938; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=20111 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134021; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/20111/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133922; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "20111+20111"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        AutoClusterId = 38259; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.37"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 249656; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134021; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 249656; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583938.0#1446133922"; 
+        RemoteSysCpu = 7.000000000000000E+00; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 16; 
+        LastRemoteHost = "slot1_10@e168.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/20111/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5056; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.37:57713>#1445396629#2313#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 20111+20111"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 16; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1205568; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 52; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115115; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.878200000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2890029,ChtcWrapper260.out,AuditLog.260,simu_3_260.txt,harvest.log,260.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.890300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.050700000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134017; 
+        QDate = 1446105803; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134017; 
+        LastMatchTime = 1446115114; 
+        LastJobLeaseRenewal = 1446134017; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582724; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/260/process.log"; 
+        JobCurrentStartDate = 1446115114; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=260 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "260+260"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+   

<TRUNCATED>


[03/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/resources/jobads.txt
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/jobads.txt b/asterix-external-data/src/test/resources/jobads.txt
new file mode 100644
index 0000000..2ca4919
--- /dev/null
+++ b/asterix-external-data/src/test/resources/jobads.txt
@@ -0,0 +1,12869 @@
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446112223; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.179100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2696692,ChtcWrapper159.out,AuditLog.159,simu_3_159.txt,harvest.log,159.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.195400000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/159"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134176; 
+        LastMatchTime = 1446112222; 
+        LastJobLeaseRenewal = 1446134176; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582557; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=159 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134177; 
+        QDate = 1446105741; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/159/process.log"; 
+        JobCurrentStartDate = 1446112222; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "159+159"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 21954; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.152"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134176; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446112222; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582557.0#1446105741"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.195400000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e352.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/159/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125604; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.152:39021>#1444772294#9281#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 159+159"; 
+        CumulativeSlotTime = 2.195400000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 21953; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446111648; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.235300000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_818403,ChtcWrapper211.out,AuditLog.211,simu_3_211.txt,harvest.log,211.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.252000000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.060300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134167; 
+        QDate = 1446105734; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134167; 
+        LastMatchTime = 1446111647; 
+        LastJobLeaseRenewal = 1446134167; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582533; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211/process.log"; 
+        JobCurrentStartDate = 1446111647; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=211 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "211+211"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 22520; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.61"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134167; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446111647; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582533.0#1446105734"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.252000000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e261.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/211/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 126608; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.61:49736>#1444759807#6759#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 211+211"; 
+        CumulativeSlotTime = 2.252000000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 22519; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446134109; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.400000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,CURLTIME_137795,ChtcWrapper11021.out,R2011b_INFO,AuditLog.11021,SLIBS2.tar.gz,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5124; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727270000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        RecentStatsLifetimeStarter = 48; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134165; 
+        QDate = 1446134012; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134165; 
+        LastMatchTime = 1446134107; 
+        LastJobLeaseRenewal = 1446134165; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49584018; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/11021/process.log"; 
+        JobCurrentStartDate = 1446134107; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=11021 --"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "11021+11021"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.39"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134165; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446134107; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49584018.0#1446134012"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 14; 
+        LastRemoteHost = "slot1@e239.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/11021/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5124; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.39:54850>#1445038698#5043#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 11021+11021"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 14; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1139127; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 56; 
+        ImageSize = 7500; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        WantGlidein = true; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/11021"
+    ]
+
+    [
+        BlockWrites = 4; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446108996; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.477600000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 100000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,ChtcWrapper407.out,AuditLog.407,CURLTIME_1861323,407.out,simu_3_407.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 123648; 
+        RemoteWallClockTime = 2.513300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastRejMatchReason = "PREEMPTION_REQUIREMENTS == False "; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 3976; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 30280; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/407"; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134128; 
+        LastMatchTime = 1446108995; 
+        LastJobLeaseRenewal = 1446134128; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582261; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=407 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134128; 
+        QDate = 1446105631; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/407/process.log"; 
+        JobCurrentStartDate = 1446108995; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "407+407"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 25133; 
+        AutoClusterId = 38210; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 16; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134128; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446108995; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582261.0#1446105631"; 
+        RemoteSysCpu = 2.770000000000000E+02; 
+        LastRejMatchTime = 1446108994; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.513300000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 906; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/407/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 76112; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1604#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 407+407"; 
+        CumulativeSlotTime = 2.513300000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 313; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 25132; 
+        ImageSize = 125000
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446121054; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.293400000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_37424,ChtcWrapper409.out,AuditLog.409,simu_3_409.txt,harvest.log,409.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.305100000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.787300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134104; 
+        LastMatchTime = 1446121053; 
+        LastJobLeaseRenewal = 1446134104; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49583239; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=409 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134104; 
+        QDate = 1446106003; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409/process.log"; 
+        JobCurrentStartDate = 1446121053; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "409+409"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 13051; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.242"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134104; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446121053; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583239.0#1446106003"; 
+        RemoteSysCpu = 9.300000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.305100000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e442.chtc.WISC.EDU"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/409/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 127216; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.242:38884>#1443991450#10456#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 409+409"; 
+        CumulativeSlotTime = 1.305100000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 13050; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1445943853; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.852360000000000E+05; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.843670000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.902470000000000E+05; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.076600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134099; 
+        QDate = 1445938922; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134099; 
+        LastMatchTime = 1445943852; 
+        LastJobLeaseRenewal = 1446134099; 
+        DAGManNodesLog = "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49573720; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally/Simulation_condor/model_3/180/process.log"; 
+        JobCurrentStartDate = 1445943852; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "180+180"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 190247; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.72"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49572657; 
+        EnteredCurrentStatus = 1446134099; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1445943852; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49573720.0#1445938922"; 
+        RemoteSysCpu = 1.835000000000000E+03; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.902470000000000E+05; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e272.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 123680; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.72:29075>#1444753997#6000#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 180+180"; 
+        CumulativeSlotTime = 1.902470000000000E+05; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 190245; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally/Simulation_condor/model_3/180"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446114726; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.908100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 75000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,232.out,ChtcWrapper232.out,AuditLog.232,CURLTIME_1864147,simu_3_232.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 118772; 
+        RemoteWallClockTime = 1.933800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.791100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 12; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 26436; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134062; 
+        QDate = 1446105779; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134062; 
+        LastMatchTime = 1446114724; 
+        LastJobLeaseRenewal = 1446134062; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582659; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232/process.log"; 
+        JobCurrentStartDate = 1446114724; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=232 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "232+232"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 19338; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134062; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446114724; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582659.0#1446105779"; 
+        RemoteSysCpu = 1.790000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.933800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 615; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/232/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 71268; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1612#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 232+232"; 
+        CumulativeSlotTime = 1.933800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 3; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216668; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 19336; 
+        ImageSize = 125000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.200000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "R2011b_INFO,CODEBLOWUP,AuditLog.10012,SLIBS2.tar.gz,ChtcWrapper10012.out,CURLTIME_2575055,chtcinnerwrapper"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5128; 
+        RemoteWallClockTime = 7.700000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727355000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/10012"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 67; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134040; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134040; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583905; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=10012 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134040; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/10012/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133888; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "10012+10012"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 77; 
+        AutoClusterId = 38267; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.69"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134040; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583905.0#1446133888"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 7.700000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 12; 
+        LastRemoteHost = "slot1_2@e189.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/10012/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5128; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.69:4177>#1444973293#3769#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 10012+10012"; 
+        CumulativeSlotTime = 7.700000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 12; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1211433; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 76; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115779; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.811800000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.847170000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3140097,ChtcWrapper3.out,AuditLog.3,simu_3_3.txt,harvest.log,3.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.824800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.789600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134026; 
+        QDate = 1446105835; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134026; 
+        LastMatchTime = 1446115778; 
+        LastJobLeaseRenewal = 1446134026; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582786; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3/process.log"; 
+        JobCurrentStartDate = 1446115778; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=3 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "3+3"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 18248; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.107"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582778; 
+        EnteredCurrentStatus = 1446134026; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446115778; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582786.0#1446105835"; 
+        RemoteSysCpu = 1.080000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.824800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e307.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.46/Simulation_condor/data/3/,/home/xguo23/model_3_1.46/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125940; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.107:63744>#1444685448#11070#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 3+3"; 
+        CumulativeSlotTime = 1.824800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 18247; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.46/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3"
+    ]
+
+    [
+        BlockWrites = 506; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.100000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,SLIBS2.tar.gz,R2011b_INFO,AuditLog.20111,CURLTIME_1051736,ChtcWrapper20111.out,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5056; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727274000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 164; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 164; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/20111"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 43; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 506; 
+        CompletionDate = 1446134021; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134021; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583938; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=20111 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134021; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/20111/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133922; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "20111+20111"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        AutoClusterId = 38259; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.37"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 249656; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134021; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 249656; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583938.0#1446133922"; 
+        RemoteSysCpu = 7.000000000000000E+00; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 16; 
+        LastRemoteHost = "slot1_10@e168.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/20111/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5056; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.37:57713>#1445396629#2313#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 20111+20111"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 16; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1205568; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 52; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115115; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.878200000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2890029,ChtcWrapper260.out,AuditLog.260,simu_3_260.txt,harvest.log,260.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.890300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.050700000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134017; 
+        QDate = 1446105803; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134017; 
+        LastMatchTime = 1446115114; 
+        LastJobLeaseRenewal = 1446134017; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582724; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/260/process.log"; 
+        JobCurrentStartDate = 1446115114; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=260 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "260+260"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+   

<TRUNCATED>


[33/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/data/external-parser/dropbox/jobads1.txt
----------------------------------------------------------------------
diff --git a/asterix-app/data/external-parser/dropbox/jobads1.txt b/asterix-app/data/external-parser/dropbox/jobads1.txt
new file mode 100644
index 0000000..f420be4
--- /dev/null
+++ b/asterix-app/data/external-parser/dropbox/jobads1.txt
@@ -0,0 +1,12869 @@
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446112223;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 2.179100000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.850540000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_2696692,ChtcWrapper159.out,AuditLog.159,simu_3_159.txt,harvest.log,159.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 2.195400000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.056100000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/159";
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134176;
+        LastMatchTime = 1446112222;
+        LastJobLeaseRenewal = 1446134176;
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582557;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=159 -- 3";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134177;
+        QDate = 1446105741;
+        JobLeaseDuration = 2400;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/159/process.log";
+        JobCurrentStartDate = 1446112222;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "159+159";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 21954;
+        AutoClusterId = 24;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.152";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49581933;
+        EnteredCurrentStatus = 1446134176;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446112222;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582557.0#1446105741";
+        RemoteSysCpu = 1.370000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 2.195400000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e352.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/159/,/home/xguo23/finally_2/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 125604;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.152:39021>#1444772294#9281#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 159+159";
+        CumulativeSlotTime = 2.195400000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 21953;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446111648;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 2.235300000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_818403,ChtcWrapper211.out,AuditLog.211,simu_3_211.txt,harvest.log,211.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 2.252000000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.060300000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134167;
+        QDate = 1446105734;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134167;
+        LastMatchTime = 1446111647;
+        LastJobLeaseRenewal = 1446134167;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582533;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211/process.log";
+        JobCurrentStartDate = 1446111647;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=211 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "211+211";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 22520;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.61";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134167;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446111647;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582533.0#1446105734";
+        RemoteSysCpu = 1.370000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 2.252000000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e261.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/211/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 126608;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.61:49736>#1444759807#6759#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 211+211";
+        CumulativeSlotTime = 2.252000000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 22519;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446134109;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.400000000000000E+01;
+        NiceUser = false;
+        BytesRecvd = 1.220270000000000E+06;
+        RequestMemory = 1000;
+        ResidentSetSize = 7500;
+        StreamOut = false;
+        SpooledOutputFiles = "chtcinnerwrapper,CURLTIME_137795,ChtcWrapper11021.out,R2011b_INFO,AuditLog.11021,SLIBS2.tar.gz,CODEBLOWUP";
+        OnExitRemove = true;
+        ImageSize_RAW = 5124;
+        RemoteWallClockTime = 5.800000000000000E+01;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 5;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.727270000000000E+06;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 160;
+        TransferInputSizeMB = 1;
+        Matlab = "R2011b";
+        BlockReadKbytes = 160;
+        RecentStatsLifetimeStarter = 48;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "dentler";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134165;
+        QDate = 1446134012;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134165;
+        LastMatchTime = 1446134107;
+        LastJobLeaseRenewal = 1446134165;
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log";
+        ClusterId = 49584018;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "dentler@chtc.wisc.edu";
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/11021/process.log";
+        JobCurrentStartDate = 1446134107;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=11021 --";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "11021+11021";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 58;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.39";
+        WantFlocking = true;
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49583804;
+        EnteredCurrentStatus = 1446134165;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446134107;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49584018.0#1446134012";
+        RemoteSysCpu = 1.200000000000000E+01;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 5.800000000000000E+01;
+        WantCheckpoint = false;
+        BlockReads = 14;
+        LastRemoteHost = "slot1@e239.chtc.wisc.edu";
+        TransferInput = "/home/dentler/ChtcRun/project_auction/11021/,/home/dentler/ChtcRun/project_auction/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 4000000;
+        ResidentSetSize_RAW = 5124;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.39:54850>#1445038698#5043#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 11021+11021";
+        CumulativeSlotTime = 5.800000000000000E+01;
+        JobRunCount = 1;
+        RecentBlockReads = 14;
+        StreamErr = false;
+        DiskUsage_RAW = 1139127;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 56;
+        ImageSize = 7500;
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper";
+        WantGlidein = true;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/11021"
+    ]
+
+    [
+        BlockWrites = 4;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446108996;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 2.477600000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.850540000000000E+05;
+        ResidentSetSize = 100000;
+        StreamOut = false;
+        SpooledOutputFiles = "harvest.log,ChtcWrapper407.out,AuditLog.407,CURLTIME_1861323,407.out,simu_3_407.txt";
+        OnExitRemove = true;
+        ImageSize_RAW = 123648;
+        RemoteWallClockTime = 2.513300000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.056100000000000E+04;
+        LastRejMatchReason = "PREEMPTION_REQUIREMENTS == False ";
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 3976;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 30280;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/407";
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper";
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134128;
+        LastMatchTime = 1446108995;
+        LastJobLeaseRenewal = 1446134128;
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582261;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=407 -- 3";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134128;
+        QDate = 1446105631;
+        JobLeaseDuration = 2400;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/407/process.log";
+        JobCurrentStartDate = 1446108995;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "407+407";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 25133;
+        AutoClusterId = 38210;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 16;
+        ExitBySignal = false;
+        DAGManJobId = 49581933;
+        EnteredCurrentStatus = 1446134128;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446108995;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582261.0#1446105631";
+        RemoteSysCpu = 2.770000000000000E+02;
+        LastRejMatchTime = 1446108994;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 2.513300000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 906;
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/407/,/home/xguo23/finally_2/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 76112;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1604#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 407+407";
+        CumulativeSlotTime = 2.513300000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 313;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 25132;
+        ImageSize = 125000
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446121054;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.293400000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_37424,ChtcWrapper409.out,AuditLog.409,simu_3_409.txt,harvest.log,409.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.305100000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.787300000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409";
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134104;
+        LastMatchTime = 1446121053;
+        LastJobLeaseRenewal = 1446134104;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49583239;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=409 -- 3";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134104;
+        QDate = 1446106003;
+        JobLeaseDuration = 2400;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409/process.log";
+        JobCurrentStartDate = 1446121053;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "409+409";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 13051;
+        AutoClusterId = 24;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.242";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134104;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446121053;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583239.0#1446106003";
+        RemoteSysCpu = 9.300000000000000E+01;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.305100000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e442.chtc.WISC.EDU";
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/409/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 127216;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.242:38884>#1443991450#10456#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 409+409";
+        CumulativeSlotTime = 1.305100000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 13050;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1445943853;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.852360000000000E+05;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.843670000000000E+05;
+        ResidentSetSize = 125000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.902470000000000E+05;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.076600000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134099;
+        QDate = 1445938922;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134099;
+        LastMatchTime = 1445943852;
+        LastJobLeaseRenewal = 1446134099;
+        DAGManNodesLog = "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49573720;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/finally/Simulation_condor/model_3/180/process.log";
+        JobCurrentStartDate = 1445943852;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "180+180";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 190247;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.72";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49572657;
+        EnteredCurrentStatus = 1446134099;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1445943852;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49573720.0#1445938922";
+        RemoteSysCpu = 1.835000000000000E+03;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.902470000000000E+05;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e272.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 123680;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.72:29075>#1444753997#6000#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 180+180";
+        CumulativeSlotTime = 1.902470000000000E+05;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 190245;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/finally/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/finally/Simulation_condor/model_3/180"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446114726;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.908100000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 75000;
+        StreamOut = false;
+        SpooledOutputFiles = "harvest.log,232.out,ChtcWrapper232.out,AuditLog.232,CURLTIME_1864147,simu_3_232.txt";
+        OnExitRemove = true;
+        ImageSize_RAW = 118772;
+        RemoteWallClockTime = 1.933800000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.791100000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 12;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 26436;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134062;
+        QDate = 1446105779;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134062;
+        LastMatchTime = 1446114724;
+        LastJobLeaseRenewal = 1446134062;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582659;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232/process.log";
+        JobCurrentStartDate = 1446114724;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=232 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "232+232";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 19338;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134062;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446114724;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582659.0#1446105779";
+        RemoteSysCpu = 1.790000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.933800000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 615;
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/232/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 71268;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1612#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 232+232";
+        CumulativeSlotTime = 1.933800000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 3;
+        StreamErr = false;
+        DiskUsage_RAW = 1216668;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 19336;
+        ImageSize = 125000;
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446133964;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.200000000000000E+01;
+        NiceUser = false;
+        BytesRecvd = 1.220270000000000E+06;
+        RequestMemory = 1000;
+        ResidentSetSize = 7500;
+        StreamOut = false;
+        SpooledOutputFiles = "R2011b_INFO,CODEBLOWUP,AuditLog.10012,SLIBS2.tar.gz,ChtcWrapper10012.out,CURLTIME_2575055,chtcinnerwrapper";
+        OnExitRemove = true;
+        ImageSize_RAW = 5128;
+        RemoteWallClockTime = 7.700000000000000E+01;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 5;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.727355000000000E+06;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 160;
+        TransferInputSizeMB = 1;
+        Matlab = "R2011b";
+        BlockReadKbytes = 160;
+        LocalSysCpu = 0.0;
+        WantGlidein = true;
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/10012";
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper";
+        RecentStatsLifetimeStarter = 67;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "dentler";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134040;
+        LastMatchTime = 1446133963;
+        LastJobLeaseRenewal = 1446134040;
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log";
+        ClusterId = 49583905;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=10012 --";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134040;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "dentler@chtc.wisc.edu";
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/10012/process.log";
+        JobCurrentStartDate = 1446133963;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        JobLeaseDuration = 2400;
+        QDate = 1446133888;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "10012+10012";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 77;
+        AutoClusterId = 38267;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.244.69";
+        WantFlocking = true;
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49583804;
+        EnteredCurrentStatus = 1446134040;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446133963;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583905.0#1446133888";
+        RemoteSysCpu = 1.200000000000000E+01;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 7.700000000000000E+01;
+        WantCheckpoint = false;
+        BlockReads = 12;
+        LastRemoteHost = "slot1_2@e189.chtc.wisc.edu";
+        TransferInput = "/home/dentler/ChtcRun/project_auction/10012/,/home/dentler/ChtcRun/project_auction/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 4000000;
+        ResidentSetSize_RAW = 5128;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.244.69:4177>#1444973293#3769#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 10012+10012";
+        CumulativeSlotTime = 7.700000000000000E+01;
+        JobRunCount = 1;
+        RecentBlockReads = 12;
+        StreamErr = false;
+        DiskUsage_RAW = 1211433;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 76;
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446115779;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.811800000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.847170000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_3140097,ChtcWrapper3.out,AuditLog.3,simu_3_3.txt,harvest.log,3.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.824800000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.789600000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134026;
+        QDate = 1446105835;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134026;
+        LastMatchTime = 1446115778;
+        LastJobLeaseRenewal = 1446134026;
+        DAGManNodesLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582786;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3/process.log";
+        JobCurrentStartDate = 1446115778;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=3 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "3+3";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 18248;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.107";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582778;
+        EnteredCurrentStatus = 1446134026;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446115778;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582786.0#1446105835";
+        RemoteSysCpu = 1.080000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.824800000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e307.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/model_3_1.46/Simulation_condor/data/3/,/home/xguo23/model_3_1.46/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 125940;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.107:63744>#1444685448#11070#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 3+3";
+        CumulativeSlotTime = 1.824800000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 18247;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/model_3_1.46/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3"
+    ]
+
+    [
+        BlockWrites = 506;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446133964;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.100000000000000E+01;
+        NiceUser = false;
+        BytesRecvd = 1.220270000000000E+06;
+        RequestMemory = 1000;
+        ResidentSetSize = 7500;
+        StreamOut = false;
+        SpooledOutputFiles = "chtcinnerwrapper,SLIBS2.tar.gz,R2011b_INFO,AuditLog.20111,CURLTIME_1051736,ChtcWrapper20111.out,CODEBLOWUP";
+        OnExitRemove = true;
+        ImageSize_RAW = 5056;
+        RemoteWallClockTime = 5.800000000000000E+01;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 5;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.727274000000000E+06;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 164;
+        TransferInputSizeMB = 1;
+        Matlab = "R2011b";
+        BlockReadKbytes = 164;
+        LocalSysCpu = 0.0;
+        WantGlidein = true;
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/20111";
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper";
+        RecentStatsLifetimeStarter = 43;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "dentler";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 506;
+        CompletionDate = 1446134021;
+        LastMatchTime = 1446133963;
+        LastJobLeaseRenewal = 1446134021;
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log";
+        ClusterId = 49583938;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDrainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,Slot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Requestmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobStarts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainingCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=20111 --";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134021;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "dentler@chtc.wisc.edu";
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/20111/process.log";
+        JobCurrentStartDate = 1446133963;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        JobLeaseDuration = 2400;
+        QDate = 1446133922;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "20111+20111";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 58;
+        AutoClusterId = 38259;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.244.37";
+        WantFlocking = true;
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 249656;
+        ExitBySignal = false;
+        DAGManJobId = 49583804;
+        EnteredCurrentStatus = 1446134021;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 249656;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446133963;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583938.0#1446133922";
+        RemoteSysCpu = 7.000000000000000E+00;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 5.800000000000000E+01;
+        WantCheckpoint = false;
+        BlockReads = 16;
+        LastRemoteHost = "slot1_10@e168.chtc.wisc.edu";
+        TransferInput = "/home/dentler/ChtcRun/project_auction/20111/,/home/dentler/ChtcRun/project_auction/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 4000000;
+        ResidentSetSize_RAW = 5056;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.244.37:57713>#1445396629#2313#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 20111+20111";
+        CumulativeSlotTime = 5.800000000000000E+01;
+        JobRunCount = 1;
+        RecentBlockReads = 16;
+        StreamErr = false;
+        DiskUsage_RAW = 1205568;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 52;
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446115115;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.878200000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 125000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_2890029,ChtcWrapper260.out,AuditLog.260,simu_3_260.txt,harvest.log,260.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.890300000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.050700000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134017;
+        QDate = 1446105803;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134017;
+        LastMatchTime = 1446115114;
+        LastJobLeaseRenewal = 1446134017;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582724;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/260/process.log";
+        JobCurrentStartDate = 1446115114;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=260 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "260+260";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 18903;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.164";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134017;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446115114;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582724.0#1446105803";
+        RemoteSysCpu = 1.090000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.890300000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRe

<TRUNCATED>


[24/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java
deleted file mode 100644
index 49b88ca..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedJobNotificationHandler.java
+++ /dev/null
@@ -1,742 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.rmi.RemoteException;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.Date;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.commons.lang3.StringUtils;
-import org.apache.asterix.common.exceptions.ACIDException;
-import org.apache.asterix.external.feed.api.IFeedJoint;
-import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
-import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
-import org.apache.asterix.external.feed.api.IFeedJoint.State;
-import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber.FeedLifecycleEvent;
-import org.apache.asterix.external.feed.management.FeedConnectionId;
-import org.apache.asterix.external.feed.management.FeedConnectionRequest;
-import org.apache.asterix.external.feed.management.FeedId;
-import org.apache.asterix.external.feed.management.FeedJointKey;
-import org.apache.asterix.external.feed.management.FeedWorkManager;
-import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
-import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
-import org.apache.asterix.external.feed.watch.FeedActivity;
-import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
-import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
-import org.apache.asterix.external.feed.watch.FeedJobInfo;
-import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
-import org.apache.asterix.external.feed.watch.FeedJobInfo.JobType;
-import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
-import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
-import org.apache.asterix.external.operators.FeedMetaOperatorDescriptor;
-import org.apache.asterix.feed.FeedLifecycleListener.Message;
-import org.apache.asterix.feed.FeedWorkCollection.SubscribeFeedWork;
-import org.apache.asterix.metadata.feeds.BuiltinFeedPolicies;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.algebricks.runtime.base.IPushRuntimeFactory;
-import org.apache.hyracks.algebricks.runtime.operators.meta.AlgebricksMetaOperatorDescriptor;
-import org.apache.hyracks.algebricks.runtime.operators.std.AssignRuntimeFactory;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.dataflow.IConnectorDescriptor;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.dataflow.OperatorDescriptorId;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobInfo;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.api.job.JobStatus;
-import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexInsertUpdateDeleteOperatorDescriptor;
-
-public class FeedJobNotificationHandler implements Runnable {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJobNotificationHandler.class.getName());
-
-    private final LinkedBlockingQueue<Message> inbox;
-    private final Map<FeedConnectionId, List<IFeedLifecycleEventSubscriber>> eventSubscribers;
-
-    private final Map<JobId, FeedJobInfo> jobInfos;
-    private final Map<FeedId, FeedIntakeInfo> intakeJobInfos;
-    private final Map<FeedConnectionId, FeedConnectJobInfo> connectJobInfos;
-    private final Map<FeedId, List<IFeedJoint>> feedPipeline;
-    private final Map<FeedConnectionId, Pair<IIntakeProgressTracker, Long>> feedIntakeProgressTrackers;
-
-    public FeedJobNotificationHandler(LinkedBlockingQueue<Message> inbox) {
-        this.inbox = inbox;
-        this.jobInfos = new HashMap<JobId, FeedJobInfo>();
-        this.intakeJobInfos = new HashMap<FeedId, FeedIntakeInfo>();
-        this.connectJobInfos = new HashMap<FeedConnectionId, FeedConnectJobInfo>();
-        this.feedPipeline = new HashMap<FeedId, List<IFeedJoint>>();
-        this.eventSubscribers = new HashMap<FeedConnectionId, List<IFeedLifecycleEventSubscriber>>();
-        this.feedIntakeProgressTrackers = new HashMap<FeedConnectionId, Pair<IIntakeProgressTracker, Long>>();
-    }
-
-    @Override
-    public void run() {
-        Message mesg;
-        while (true) {
-            try {
-                mesg = inbox.take();
-                switch (mesg.messageKind) {
-                    case JOB_START:
-                        handleJobStartMessage(mesg);
-                        break;
-                    case JOB_FINISH:
-                        handleJobFinishMessage(mesg);
-                        break;
-                }
-            } catch (Exception e) {
-                e.printStackTrace();
-            }
-
-        }
-    }
-
-    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
-            IIntakeProgressTracker feedIntakeProgressTracker) {
-        if (feedIntakeProgressTrackers.get(connectionId) == null) {
-            this.feedIntakeProgressTrackers.put(connectionId, new Pair<IIntakeProgressTracker, Long>(
-                    feedIntakeProgressTracker, 0L));
-        } else {
-            throw new IllegalStateException("Progress tracker for connection " + connectionId
-                    + " is already registered");
-        }
-    }
-
-    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
-        this.feedIntakeProgressTrackers.remove(connectionId);
-    }
-
-    public void updateTrackingInformation(StorageReportFeedMessage srm) {
-        Pair<IIntakeProgressTracker, Long> p = feedIntakeProgressTrackers.get(srm.getConnectionId());
-        if (p != null && p.second < srm.getLastPersistedTupleIntakeTimestamp()) {
-            p.second = srm.getLastPersistedTupleIntakeTimestamp();
-            p.first.notifyIngestedTupleTimestamp(p.second);
-        }
-    }
-
-    public Collection<FeedIntakeInfo> getFeedIntakeInfos() {
-        return intakeJobInfos.values();
-    }
-
-    public Collection<FeedConnectJobInfo> getFeedConnectInfos() {
-        return connectJobInfos.values();
-    }
-
-    public void registerFeedJoint(IFeedJoint feedJoint) {
-        List<IFeedJoint> feedJointsOnPipeline = feedPipeline.get(feedJoint.getOwnerFeedId());
-        if (feedJointsOnPipeline == null) {
-            feedJointsOnPipeline = new ArrayList<IFeedJoint>();
-            feedPipeline.put(feedJoint.getOwnerFeedId(), feedJointsOnPipeline);
-            feedJointsOnPipeline.add(feedJoint);
-        } else {
-            if (!feedJointsOnPipeline.contains(feedJoint)) {
-                feedJointsOnPipeline.add(feedJoint);
-            } else {
-                throw new IllegalArgumentException("Feed joint " + feedJoint + " already registered");
-            }
-        }
-    }
-
-    public void registerFeedIntakeJob(FeedId feedId, JobId jobId, JobSpecification jobSpec) throws HyracksDataException {
-        if (jobInfos.get(jobId) != null) {
-            throw new IllegalStateException("Feed job already registered");
-        }
-
-        List<IFeedJoint> joints = feedPipeline.get(feedId);
-        IFeedJoint intakeJoint = null;
-        for (IFeedJoint joint : joints) {
-            if (joint.getType().equals(IFeedJoint.FeedJointType.INTAKE)) {
-                intakeJoint = joint;
-                break;
-            }
-        }
-
-        if (intakeJoint != null) {
-            FeedIntakeInfo intakeJobInfo = new FeedIntakeInfo(jobId, FeedJobState.CREATED, FeedJobInfo.JobType.INTAKE,
-                    feedId, intakeJoint, jobSpec);
-            intakeJobInfos.put(feedId, intakeJobInfo);
-            jobInfos.put(jobId, intakeJobInfo);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered feed intake [" + jobId + "]" + " for feed " + feedId);
-            }
-        } else {
-            throw new HyracksDataException("Could not register feed intake job [" + jobId + "]" + " for feed  "
-                    + feedId);
-        }
-    }
-
-    public void registerFeedCollectionJob(FeedId sourceFeedId, FeedConnectionId connectionId, JobId jobId,
-            JobSpecification jobSpec, Map<String, String> feedPolicy) {
-        if (jobInfos.get(jobId) != null) {
-            throw new IllegalStateException("Feed job already registered");
-        }
-
-        List<IFeedJoint> feedJoints = feedPipeline.get(sourceFeedId);
-        FeedConnectionId cid = null;
-        IFeedJoint sourceFeedJoint = null;
-        for (IFeedJoint joint : feedJoints) {
-            cid = joint.getReceiver(connectionId);
-            if (cid != null) {
-                sourceFeedJoint = joint;
-                break;
-            }
-        }
-
-        if (cid != null) {
-            FeedConnectJobInfo cInfo = new FeedConnectJobInfo(jobId, FeedJobState.CREATED, connectionId,
-                    sourceFeedJoint, null, jobSpec, feedPolicy);
-            jobInfos.put(jobId, cInfo);
-            connectJobInfos.put(connectionId, cInfo);
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Registered feed connection [" + jobId + "]" + " for feed " + connectionId);
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Could not register feed collection job [" + jobId + "]" + " for feed connection "
-                        + connectionId);
-            }
-        }
-
-    }
-
-    public void deregisterFeedIntakeJob(JobId jobId) {
-        if (jobInfos.get(jobId) == null) {
-            throw new IllegalStateException(" Feed Intake job not registered ");
-        }
-
-        FeedIntakeInfo info = (FeedIntakeInfo) jobInfos.get(jobId);
-        jobInfos.remove(jobId);
-        intakeJobInfos.remove(info.getFeedId());
-
-        if (!info.getState().equals(FeedJobState.UNDER_RECOVERY)) {
-            List<IFeedJoint> joints = feedPipeline.get(info.getFeedId());
-            joints.remove(info.getIntakeFeedJoint());
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Deregistered feed intake job [" + jobId + "]");
-            }
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Not removing feed joint as intake job is in " + FeedJobState.UNDER_RECOVERY + " state.");
-            }
-        }
-
-    }
-
-    private void handleJobStartMessage(Message message) throws Exception {
-        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
-        switch (jobInfo.getJobType()) {
-            case INTAKE:
-                handleIntakeJobStartMessage((FeedIntakeInfo) jobInfo);
-                break;
-            case FEED_CONNECT:
-                handleCollectJobStartMessage((FeedConnectJobInfo) jobInfo);
-                break;
-        }
-
-    }
-
-    private void handleJobFinishMessage(Message message) throws Exception {
-        FeedJobInfo jobInfo = jobInfos.get(message.jobId);
-        switch (jobInfo.getJobType()) {
-            case INTAKE:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Intake Job finished for feed intake " + jobInfo.getJobId());
-                }
-                handleFeedIntakeJobFinishMessage((FeedIntakeInfo) jobInfo, message);
-                break;
-            case FEED_CONNECT:
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Collect Job finished for  " + (FeedConnectJobInfo) jobInfo);
-                }
-                handleFeedCollectJobFinishMessage((FeedConnectJobInfo) jobInfo);
-                break;
-        }
-
-    }
-
-    private synchronized void handleIntakeJobStartMessage(FeedIntakeInfo intakeJobInfo) throws Exception {
-        List<OperatorDescriptorId> intakeOperatorIds = new ArrayList<OperatorDescriptorId>();
-        Map<OperatorDescriptorId, IOperatorDescriptor> operators = intakeJobInfo.getSpec().getOperatorMap();
-        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
-            IOperatorDescriptor opDesc = entry.getValue();
-            if (opDesc instanceof FeedIntakeOperatorDescriptor) {
-                intakeOperatorIds.add(opDesc.getOperatorId());
-            }
-        }
-
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(intakeJobInfo.getJobId());
-        List<String> intakeLocations = new ArrayList<String>();
-        for (OperatorDescriptorId intakeOperatorId : intakeOperatorIds) {
-            Map<Integer, String> operatorLocations = info.getOperatorLocations().get(intakeOperatorId);
-            int nOperatorInstances = operatorLocations.size();
-            for (int i = 0; i < nOperatorInstances; i++) {
-                intakeLocations.add(operatorLocations.get(i));
-            }
-        }
-        // intakeLocations is an ordered list; element at position i corresponds to location of i'th instance of operator
-        intakeJobInfo.setIntakeLocation(intakeLocations);
-        intakeJobInfo.getIntakeFeedJoint().setState(State.ACTIVE);
-        intakeJobInfo.setState(FeedJobState.ACTIVE);
-
-        // notify event listeners
-        notifyFeedEventSubscribers(intakeJobInfo, FeedLifecycleEvent.FEED_INTAKE_STARTED);
-    }
-
-    private void handleCollectJobStartMessage(FeedConnectJobInfo cInfo) throws RemoteException, ACIDException {
-        // set locations of feed sub-operations (intake, compute, store)
-        setLocations(cInfo);
-
-        // activate joints
-        List<IFeedJoint> joints = feedPipeline.get(cInfo.getConnectionId().getFeedId());
-        for (IFeedJoint joint : joints) {
-            if (joint.getProvider().equals(cInfo.getConnectionId())) {
-                joint.setState(State.ACTIVE);
-                if (joint.getType().equals(IFeedJoint.FeedJointType.COMPUTE)) {
-                    cInfo.setComputeFeedJoint(joint);
-                }
-            }
-        }
-        cInfo.setState(FeedJobState.ACTIVE);
-
-        // register activity in metadata
-        registerFeedActivity(cInfo);
-        // notify event listeners
-        notifyFeedEventSubscribers(cInfo, FeedLifecycleEvent.FEED_COLLECT_STARTED);
-    }
-
-    private void notifyFeedEventSubscribers(FeedJobInfo jobInfo, FeedLifecycleEvent event) {
-        JobType jobType = jobInfo.getJobType();
-        List<FeedConnectionId> impactedConnections = new ArrayList<FeedConnectionId>();
-        if (jobType.equals(JobType.INTAKE)) {
-            FeedId feedId = ((FeedIntakeInfo) jobInfo).getFeedId();
-            for (FeedConnectionId connId : eventSubscribers.keySet()) {
-                if (connId.getFeedId().equals(feedId)) {
-                    impactedConnections.add(connId);
-                }
-            }
-        } else {
-            impactedConnections.add(((FeedConnectJobInfo) jobInfo).getConnectionId());
-        }
-
-        for (FeedConnectionId connId : impactedConnections) {
-            List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connId);
-            if (subscribers != null && !subscribers.isEmpty()) {
-                for (IFeedLifecycleEventSubscriber subscriber : subscribers) {
-                    subscriber.handleFeedEvent(event);
-                }
-            }
-        }
-    }
-
-    public synchronized void submitFeedConnectionRequest(IFeedJoint feedJoint, final FeedConnectionRequest request)
-            throws Exception {
-        List<String> locations = null;
-        switch (feedJoint.getType()) {
-            case INTAKE:
-                FeedIntakeInfo intakeInfo = intakeJobInfos.get(feedJoint.getOwnerFeedId());
-                locations = intakeInfo.getIntakeLocation();
-                break;
-            case COMPUTE:
-                FeedConnectionId connectionId = feedJoint.getProvider();
-                FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-                locations = cInfo.getComputeLocations();
-                break;
-        }
-
-        SubscribeFeedWork work = new SubscribeFeedWork(locations.toArray(new String[] {}), request);
-        FeedWorkManager.INSTANCE.submitWork(work, new SubscribeFeedWork.FeedSubscribeWorkEventListener());
-    }
-
-    public IFeedJoint getSourceFeedJoint(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        if (cInfo != null) {
-            return cInfo.getSourceFeedJoint();
-        }
-        return null;
-    }
-
-    public Set<FeedConnectionId> getActiveFeedConnections() {
-        Set<FeedConnectionId> activeConnections = new HashSet<FeedConnectionId>();
-        for (FeedConnectJobInfo cInfo : connectJobInfos.values()) {
-            if (cInfo.getState().equals(FeedJobState.ACTIVE)) {
-                activeConnections.add(cInfo.getConnectionId());
-            }
-        }
-        return activeConnections;
-    }
-
-    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        if (cInfo != null) {
-            return cInfo.getState().equals(FeedJobState.ACTIVE);
-        }
-        return false;
-    }
-
-    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
-        FeedConnectJobInfo connectJobInfo = connectJobInfos.get(connectionId);
-        connectJobInfo.setState(jobState);
-    }
-
-    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getState();
-    }
-
-    private void handleFeedIntakeJobFinishMessage(FeedIntakeInfo intakeInfo, Message message) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(message.jobId);
-        JobStatus status = info.getStatus();
-        FeedLifecycleEvent event;
-        event = status.equals(JobStatus.FAILURE) ? FeedLifecycleEvent.FEED_INTAKE_FAILURE
-                : FeedLifecycleEvent.FEED_ENDED;
-
-        // remove feed joints
-        deregisterFeedIntakeJob(message.jobId);
-
-        // notify event listeners
-        notifyFeedEventSubscribers(intakeInfo, event);
-
-    }
-
-    private void handleFeedCollectJobFinishMessage(FeedConnectJobInfo cInfo) throws Exception {
-        FeedConnectionId connectionId = cInfo.getConnectionId();
-
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobInfo info = hcc.getJobInfo(cInfo.getJobId());
-        JobStatus status = info.getStatus();
-        boolean failure = status != null && status.equals(JobStatus.FAILURE);
-        FeedPolicyAccessor fpa = new FeedPolicyAccessor(cInfo.getFeedPolicy());
-
-        boolean removeJobHistory = !failure;
-        boolean retainSubscription = cInfo.getState().equals(FeedJobState.UNDER_RECOVERY)
-                || (failure && fpa.continueOnHardwareFailure());
-
-        if (!retainSubscription) {
-            IFeedJoint feedJoint = cInfo.getSourceFeedJoint();
-            feedJoint.removeReceiver(connectionId);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Subscription " + cInfo.getConnectionId() + " completed successfully. Removed subscription");
-            }
-            removeFeedJointsPostPipelineTermination(cInfo.getConnectionId());
-        }
-
-        if (removeJobHistory) {
-            connectJobInfos.remove(connectionId);
-            jobInfos.remove(cInfo.getJobId());
-            feedIntakeProgressTrackers.remove(cInfo.getConnectionId());
-        }
-        deregisterFeedActivity(cInfo);
-
-        // notify event listeners
-        FeedLifecycleEvent event = failure ? FeedLifecycleEvent.FEED_COLLECT_FAILURE : FeedLifecycleEvent.FEED_ENDED;
-        notifyFeedEventSubscribers(cInfo, event);
-    }
-
-    private void registerFeedActivity(FeedConnectJobInfo cInfo) {
-        Map<String, String> feedActivityDetails = new HashMap<String, String>();
-
-        if (cInfo.getCollectLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.INTAKE_LOCATIONS,
-                    StringUtils.join(cInfo.getCollectLocations().iterator(), ','));
-        }
-
-        if (cInfo.getComputeLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.COMPUTE_LOCATIONS,
-                    StringUtils.join(cInfo.getComputeLocations().iterator(), ','));
-        }
-
-        if (cInfo.getStorageLocations() != null) {
-            feedActivityDetails.put(FeedActivity.FeedActivityDetails.STORAGE_LOCATIONS,
-                    StringUtils.join(cInfo.getStorageLocations().iterator(), ','));
-        }
-
-        String policyName = cInfo.getFeedPolicy().get(BuiltinFeedPolicies.CONFIG_FEED_POLICY_KEY);
-        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_POLICY_NAME, policyName);
-
-        feedActivityDetails.put(FeedActivity.FeedActivityDetails.FEED_CONNECT_TIMESTAMP, (new Date()).toString());
-        try {
-            FeedActivity feedActivity = new FeedActivity(cInfo.getConnectionId().getFeedId().getDataverse(), cInfo
-                    .getConnectionId().getFeedId().getFeedName(), cInfo.getConnectionId().getDatasetName(),
-                    feedActivityDetails);
-            CentralFeedManager.getInstance().getFeedLoadManager()
-                    .reportFeedActivity(cInfo.getConnectionId(), feedActivity);
-
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to register feed activity for " + cInfo + " " + e.getMessage());
-            }
-
-        }
-
-    }
-
-    public void deregisterFeedActivity(FeedConnectJobInfo cInfo) {
-        try {
-            CentralFeedManager.getInstance().getFeedLoadManager().removeFeedActivity(cInfo.getConnectionId());
-        } catch (Exception e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Unable to deregister feed activity for " + cInfo + " " + e.getMessage());
-            }
-        }
-    }
-
-    public void removeFeedJointsPostPipelineTermination(FeedConnectionId connectionId) {
-        FeedConnectJobInfo cInfo = connectJobInfos.get(connectionId);
-        List<IFeedJoint> feedJoints = feedPipeline.get(connectionId.getFeedId());
-
-        IFeedJoint sourceJoint = cInfo.getSourceFeedJoint();
-        List<FeedConnectionId> all = sourceJoint.getReceivers();
-        boolean removeSourceJoint = all.size() < 2;
-        if (removeSourceJoint) {
-            feedJoints.remove(sourceJoint);
-        }
-
-        IFeedJoint computeJoint = cInfo.getComputeFeedJoint();
-        if (computeJoint != null && computeJoint.getReceivers().size() < 2) {
-            feedJoints.remove(computeJoint);
-        }
-    }
-
-    public boolean isRegisteredFeedJob(JobId jobId) {
-        return jobInfos.get(jobId) != null;
-    }
-
-    public List<String> getFeedComputeLocations(FeedId feedId) {
-        List<IFeedJoint> feedJoints = feedPipeline.get(feedId);
-        for (IFeedJoint joint : feedJoints) {
-            if (joint.getFeedJointKey().getFeedId().equals(feedId)) {
-                return connectJobInfos.get(joint.getProvider()).getComputeLocations();
-            }
-        }
-        return null;
-    }
-
-    public List<String> getFeedStorageLocations(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getStorageLocations();
-    }
-
-    public List<String> getFeedCollectLocations(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getCollectLocations();
-    }
-
-    public List<String> getFeedIntakeLocations(FeedId feedId) {
-        return intakeJobInfos.get(feedId).getIntakeLocation();
-    }
-
-    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getJobId();
-    }
-
-    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
-        if (subscribers == null) {
-            subscribers = new ArrayList<IFeedLifecycleEventSubscriber>();
-            eventSubscribers.put(connectionId, subscribers);
-        }
-        subscribers.add(subscriber);
-    }
-
-    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        List<IFeedLifecycleEventSubscriber> subscribers = eventSubscribers.get(connectionId);
-        if (subscribers != null) {
-            subscribers.remove(subscriber);
-        }
-    }
-
-    //============================
-
-    public boolean isFeedPointAvailable(FeedJointKey feedJointKey) {
-        List<IFeedJoint> joints = feedPipeline.get(feedJointKey.getFeedId());
-        if (joints != null && !joints.isEmpty()) {
-            for (IFeedJoint joint : joints) {
-                if (joint.getFeedJointKey().equals(feedJointKey)) {
-                    return true;
-                }
-            }
-        }
-        return false;
-    }
-
-    public Collection<IFeedJoint> getFeedIntakeJoints() {
-        List<IFeedJoint> intakeFeedPoints = new ArrayList<IFeedJoint>();
-        for (FeedIntakeInfo info : intakeJobInfos.values()) {
-            intakeFeedPoints.add(info.getIntakeFeedJoint());
-        }
-        return intakeFeedPoints;
-    }
-
-    public IFeedJoint getFeedJoint(FeedJointKey feedPointKey) {
-        List<IFeedJoint> joints = feedPipeline.get(feedPointKey.getFeedId());
-        if (joints != null && !joints.isEmpty()) {
-            for (IFeedJoint joint : joints) {
-                if (joint.getFeedJointKey().equals(feedPointKey)) {
-                    return joint;
-                }
-            }
-        }
-        return null;
-    }
-
-    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
-        IFeedJoint feedJoint = getFeedJoint(feedJointKey);
-        if (feedJoint != null) {
-            return feedJoint;
-        } else {
-            String jointKeyString = feedJointKey.getStringRep();
-            List<IFeedJoint> jointsOnPipeline = feedPipeline.get(feedJointKey.getFeedId());
-            IFeedJoint candidateJoint = null;
-            if (jointsOnPipeline != null) {
-                for (IFeedJoint joint : jointsOnPipeline) {
-                    if (jointKeyString.contains(joint.getFeedJointKey().getStringRep())) {
-                        if (candidateJoint == null) {
-                            candidateJoint = joint;
-                        } else if (joint.getFeedJointKey().getStringRep()
-                                .contains(candidateJoint.getFeedJointKey().getStringRep())) { // the found feed point is a superset of the earlier match
-                            candidateJoint = joint;
-                        }
-                    }
-                }
-            }
-            return candidateJoint;
-        }
-    }
-
-    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId).getSpec();
-    }
-
-    public IFeedJoint getFeedPoint(FeedId sourceFeedId, IFeedJoint.FeedJointType type) {
-        List<IFeedJoint> joints = feedPipeline.get(sourceFeedId);
-        for (IFeedJoint joint : joints) {
-            if (joint.getType().equals(type)) {
-                return joint;
-            }
-        }
-        return null;
-    }
-
-    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
-        return connectJobInfos.get(connectionId);
-    }
-
-    private void setLocations(FeedConnectJobInfo cInfo) {
-        JobSpecification jobSpec = cInfo.getSpec();
-
-        List<OperatorDescriptorId> collectOperatorIds = new ArrayList<OperatorDescriptorId>();
-        List<OperatorDescriptorId> computeOperatorIds = new ArrayList<OperatorDescriptorId>();
-        List<OperatorDescriptorId> storageOperatorIds = new ArrayList<OperatorDescriptorId>();
-
-        Map<OperatorDescriptorId, IOperatorDescriptor> operators = jobSpec.getOperatorMap();
-        for (Entry<OperatorDescriptorId, IOperatorDescriptor> entry : operators.entrySet()) {
-            IOperatorDescriptor opDesc = entry.getValue();
-            IOperatorDescriptor actualOp = null;
-            if (opDesc instanceof FeedMetaOperatorDescriptor) {
-                actualOp = ((FeedMetaOperatorDescriptor) opDesc).getCoreOperator();
-            } else {
-                actualOp = opDesc;
-            }
-
-            if (actualOp instanceof AlgebricksMetaOperatorDescriptor) {
-                AlgebricksMetaOperatorDescriptor op = ((AlgebricksMetaOperatorDescriptor) actualOp);
-                IPushRuntimeFactory[] runtimeFactories = op.getPipeline().getRuntimeFactories();
-                boolean computeOp = false;
-                for (IPushRuntimeFactory rf : runtimeFactories) {
-                    if (rf instanceof AssignRuntimeFactory) {
-                        IConnectorDescriptor connDesc = jobSpec.getOperatorInputMap().get(op.getOperatorId()).get(0);
-                        IOperatorDescriptor sourceOp = jobSpec.getConnectorOperatorMap().get(connDesc.getConnectorId())
-                                .getLeft().getLeft();
-                        if (sourceOp instanceof FeedCollectOperatorDescriptor) {
-                            computeOp = true;
-                            break;
-                        }
-                    }
-                }
-                if (computeOp) {
-                    computeOperatorIds.add(entry.getKey());
-                }
-            } else if (actualOp instanceof LSMTreeIndexInsertUpdateDeleteOperatorDescriptor) {
-                storageOperatorIds.add(entry.getKey());
-            } else if (actualOp instanceof FeedCollectOperatorDescriptor) {
-                collectOperatorIds.add(entry.getKey());
-            }
-        }
-
-        try {
-            IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-            JobInfo info = hcc.getJobInfo(cInfo.getJobId());
-            List<String> collectLocations = new ArrayList<String>();
-            for (OperatorDescriptorId collectOpId : collectOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(collectOpId);
-                int nOperatorInstances = operatorLocations.size();
-                for (int i = 0; i < nOperatorInstances; i++) {
-                    collectLocations.add(operatorLocations.get(i));
-                }
-            }
-
-            List<String> computeLocations = new ArrayList<String>();
-            for (OperatorDescriptorId computeOpId : computeOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(computeOpId);
-                if (operatorLocations != null) {
-                    int nOperatorInstances = operatorLocations.size();
-                    for (int i = 0; i < nOperatorInstances; i++) {
-                        computeLocations.add(operatorLocations.get(i));
-                    }
-                } else {
-                    computeLocations.clear();
-                    computeLocations.addAll(collectLocations);
-                }
-            }
-
-            List<String> storageLocations = new ArrayList<String>();
-            for (OperatorDescriptorId storageOpId : storageOperatorIds) {
-                Map<Integer, String> operatorLocations = info.getOperatorLocations().get(storageOpId);
-                if (operatorLocations == null) {
-                    continue;
-                }
-                int nOperatorInstances = operatorLocations.size();
-                for (int i = 0; i < nOperatorInstances; i++) {
-                    storageLocations.add(operatorLocations.get(i));
-                }
-            }
-            cInfo.setCollectLocations(collectLocations);
-            cInfo.setComputeLocations(computeLocations);
-            cInfo.setStorageLocations(storageLocations);
-
-        } catch (Exception e) {
-            e.printStackTrace();
-        }
-
-    }
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java
deleted file mode 100644
index 43f227d..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedJoint.java
+++ /dev/null
@@ -1,190 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.external.feed.api.IFeedJoint;
-import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
-import org.apache.asterix.external.feed.management.FeedConnectionId;
-import org.apache.asterix.external.feed.management.FeedConnectionRequest;
-import org.apache.asterix.external.feed.management.FeedId;
-import org.apache.asterix.external.feed.management.FeedJointKey;
-
-public class FeedJoint implements IFeedJoint {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedJoint.class.getName());
-
-    /** A unique key associated with the feed point **/
-    private final FeedJointKey key;
-
-    /** The state associated with the FeedJoint **/
-    private State state;
-
-    /** A list of subscribers that receive data from this FeedJoint **/
-    private final List<FeedConnectionId> receivers;
-
-    /** The feedId on which the feedPoint resides **/
-    private final FeedId ownerFeedId;
-
-    /** A list of feed subscription requests submitted for subscribing to the FeedPoint's data **/
-    private final List<FeedConnectionRequest> connectionRequests;
-
-    private final ConnectionLocation connectionLocation;
-
-    private final FeedJointType type;
-
-    private FeedConnectionId provider;
-
-    public FeedJoint(FeedJointKey key, FeedId ownerFeedId, ConnectionLocation subscriptionLocation, FeedJointType type,
-            FeedConnectionId provider) {
-        this.key = key;
-        this.ownerFeedId = ownerFeedId;
-        this.type = type;
-        this.receivers = new ArrayList<FeedConnectionId>();
-        this.state = State.CREATED;
-        this.connectionLocation = subscriptionLocation;
-        this.connectionRequests = new ArrayList<FeedConnectionRequest>();
-        this.provider = provider;
-    }
-
-    @Override
-    public int hashCode() {
-        return key.hashCode();
-    }
-
-    public void addReceiver(FeedConnectionId connectionId) {
-        receivers.add(connectionId);
-    }
-
-    public void removeReceiver(FeedConnectionId connectionId) {
-        receivers.remove(connectionId);
-    }
-
-    public synchronized void addConnectionRequest(FeedConnectionRequest request) {
-        connectionRequests.add(request);
-        if (state.equals(State.ACTIVE)) {
-            handlePendingConnectionRequest();
-        }
-    }
-
-    public synchronized void setState(State state) {
-        if (this.state.equals(state)) {
-            return;
-        }
-        this.state = state;
-        if (this.state.equals(State.ACTIVE)) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Feed joint " + this + " is now " + State.ACTIVE);
-            }
-            handlePendingConnectionRequest();
-        }
-    }
-
-    private void handlePendingConnectionRequest() {
-        for (FeedConnectionRequest connectionRequest : connectionRequests) {
-            FeedConnectionId connectionId = new FeedConnectionId(connectionRequest.getReceivingFeedId(),
-                    connectionRequest.getTargetDataset());
-            try {
-                FeedLifecycleListener.INSTANCE.submitFeedConnectionRequest(this, connectionRequest);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Submitted feed connection request " + connectionRequest + " at feed joint " + this);
-                }
-                addReceiver(connectionId);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("Unsuccessful attempt at submitting connection request " + connectionRequest
-                            + " at feed joint " + this + ". Message " + e.getMessage());
-                }
-                e.printStackTrace();
-            }
-        }
-        connectionRequests.clear();
-    }
-
-    public FeedConnectionId getReceiver(FeedConnectionId connectionId) {
-        for (FeedConnectionId cid : receivers) {
-            if (cid.equals(connectionId)) {
-                return cid;
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public String toString() {
-        return key.toString() + " [" + connectionLocation + "]" + "[" + state + "]";
-    }
-
-    @Override
-    public boolean equals(Object o) {
-        if (o == null) {
-            return false;
-        }
-        if (o == this) {
-            return true;
-        }
-        if (!(o instanceof FeedJoint)) {
-            return false;
-        }
-        return ((FeedJoint) o).getFeedJointKey().equals(this.key);
-    }
-
-    public FeedId getOwnerFeedId() {
-        return ownerFeedId;
-    }
-
-    public List<FeedConnectionRequest> getConnectionRequests() {
-        return connectionRequests;
-    }
-
-    public ConnectionLocation getConnectionLocation() {
-        return connectionLocation;
-    }
-
-    public FeedJointType getType() {
-        return type;
-    }
-
-    @Override
-    public FeedConnectionId getProvider() {
-        return provider;
-    }
-
-    public List<FeedConnectionId> getReceivers() {
-        return receivers;
-    }
-
-    public FeedJointKey getKey() {
-        return key;
-    }
-
-    public synchronized State getState() {
-        return state;
-    }
-
-    @Override
-    public FeedJointKey getFeedJointKey() {
-        return key;
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java
deleted file mode 100644
index aac3675..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedLifecycleListener.java
+++ /dev/null
@@ -1,499 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.io.PrintWriter;
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Map.Entry;
-import java.util.Set;
-import java.util.concurrent.ExecutorService;
-import java.util.concurrent.Executors;
-import java.util.concurrent.LinkedBlockingQueue;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.api.common.SessionConfig;
-import org.apache.asterix.api.common.SessionConfig.OutputFormat;
-import org.apache.asterix.aql.translator.QueryTranslator;
-import org.apache.asterix.common.api.IClusterManagementWork;
-import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
-import org.apache.asterix.common.api.IClusterManagementWorkResponse;
-import org.apache.asterix.compiler.provider.AqlCompilationProvider;
-import org.apache.asterix.compiler.provider.ILangCompilationProvider;
-import org.apache.asterix.external.feed.api.IFeedJoint;
-import org.apache.asterix.external.feed.api.IFeedLifecycleEventSubscriber;
-import org.apache.asterix.external.feed.api.IFeedLifecycleListener;
-import org.apache.asterix.external.feed.api.IIntakeProgressTracker;
-import org.apache.asterix.external.feed.management.FeedCollectInfo;
-import org.apache.asterix.external.feed.management.FeedConnectionId;
-import org.apache.asterix.external.feed.management.FeedConnectionRequest;
-import org.apache.asterix.external.feed.management.FeedId;
-import org.apache.asterix.external.feed.management.FeedJointKey;
-import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
-import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
-import org.apache.asterix.external.feed.watch.FeedIntakeInfo;
-import org.apache.asterix.external.feed.watch.FeedJobInfo;
-import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
-import org.apache.asterix.external.operators.FeedCollectOperatorDescriptor;
-import org.apache.asterix.external.operators.FeedIntakeOperatorDescriptor;
-import org.apache.asterix.lang.common.base.Statement;
-import org.apache.asterix.lang.common.statement.DataverseDecl;
-import org.apache.asterix.lang.common.statement.DisconnectFeedStatement;
-import org.apache.asterix.lang.common.struct.Identifier;
-import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.cluster.AddNodeWork;
-import org.apache.asterix.metadata.cluster.ClusterManager;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.asterix.om.util.AsterixClusterProperties;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.exceptions.HyracksException;
-import org.apache.hyracks.api.job.IActivityClusterGraphGeneratorFactory;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-/**
- * A listener that subscribes to events associated with cluster membership
- * (nodes joining/leaving the cluster) and job lifecycle (start/end of a job).
- * Subscription to such events allows keeping track of feed ingestion jobs and
- * take any corrective action that may be required when a node involved in a
- * feed leaves the cluster.
- */
-public class FeedLifecycleListener implements IFeedLifecycleListener {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedLifecycleListener.class.getName());
-
-    public static FeedLifecycleListener INSTANCE = new FeedLifecycleListener();
-    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
-
-    private final LinkedBlockingQueue<Message> jobEventInbox;
-    private final LinkedBlockingQueue<IClusterManagementWorkResponse> responseInbox;
-    private final Map<FeedCollectInfo, List<String>> dependentFeeds = new HashMap<FeedCollectInfo, List<String>>();
-    private final Map<FeedConnectionId, LinkedBlockingQueue<String>> feedReportQueue;
-    private final FeedJobNotificationHandler feedJobNotificationHandler;
-    private final FeedWorkRequestResponseHandler feedWorkRequestResponseHandler;
-    private final ExecutorService executorService;
-
-    private ClusterState state;
-
-    private FeedLifecycleListener() {
-        this.jobEventInbox = new LinkedBlockingQueue<Message>();
-        this.feedJobNotificationHandler = new FeedJobNotificationHandler(jobEventInbox);
-        this.responseInbox = new LinkedBlockingQueue<IClusterManagementWorkResponse>();
-        this.feedWorkRequestResponseHandler = new FeedWorkRequestResponseHandler(responseInbox);
-        this.feedReportQueue = new HashMap<FeedConnectionId, LinkedBlockingQueue<String>>();
-        this.executorService = Executors.newCachedThreadPool();
-        this.executorService.execute(feedJobNotificationHandler);
-        this.executorService.execute(feedWorkRequestResponseHandler);
-        ClusterManager.INSTANCE.registerSubscriber(this);
-        this.state = AsterixClusterProperties.INSTANCE.getState();
-    }
-
-    @Override
-    public void notifyJobStart(JobId jobId) throws HyracksException {
-        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
-            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_START));
-        }
-    }
-
-    @Override
-    public void notifyJobFinish(JobId jobId) throws HyracksException {
-        if (feedJobNotificationHandler.isRegisteredFeedJob(jobId)) {
-            jobEventInbox.add(new Message(jobId, Message.MessageKind.JOB_FINISH));
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("NO NEED TO NOTIFY JOB FINISH!");
-            }
-        }
-    }
-
-    public FeedConnectJobInfo getFeedConnectJobInfo(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedConnectJobInfo(connectionId);
-    }
-
-    public void registerFeedIntakeProgressTracker(FeedConnectionId connectionId,
-            IIntakeProgressTracker feedIntakeProgressTracker) {
-        feedJobNotificationHandler.registerFeedIntakeProgressTracker(connectionId, feedIntakeProgressTracker);
-    }
-
-    public void deregisterFeedIntakeProgressTracker(FeedConnectionId connectionId) {
-        feedJobNotificationHandler.deregisterFeedIntakeProgressTracker(connectionId);
-    }
-
-    public void updateTrackingInformation(StorageReportFeedMessage srm) {
-        feedJobNotificationHandler.updateTrackingInformation(srm);
-    }
-
-    /*
-     * Traverse job specification to categorize job as a feed intake job or a feed collection job
-     */
-    @Override
-    public void notifyJobCreation(JobId jobId, IActivityClusterGraphGeneratorFactory acggf) throws HyracksException {
-        JobSpecification spec = acggf.getJobSpecification();
-        FeedConnectionId feedConnectionId = null;
-        Map<String, String> feedPolicy = null;
-        for (IOperatorDescriptor opDesc : spec.getOperatorMap().values()) {
-            if (opDesc instanceof FeedCollectOperatorDescriptor) {
-                feedConnectionId = ((FeedCollectOperatorDescriptor) opDesc).getFeedConnectionId();
-                feedPolicy = ((FeedCollectOperatorDescriptor) opDesc).getFeedPolicyProperties();
-                feedJobNotificationHandler.registerFeedCollectionJob(
-                        ((FeedCollectOperatorDescriptor) opDesc).getSourceFeedId(), feedConnectionId, jobId, spec,
-                        feedPolicy);
-                break;
-            } else if (opDesc instanceof FeedIntakeOperatorDescriptor) {
-                feedJobNotificationHandler.registerFeedIntakeJob(((FeedIntakeOperatorDescriptor) opDesc).getFeedId(),
-                        jobId, spec);
-                break;
-            }
-        }
-    }
-
-    public void setJobState(FeedConnectionId connectionId, FeedJobState jobState) {
-        feedJobNotificationHandler.setJobState(connectionId, jobState);
-    }
-
-    public FeedJobState getFeedJobState(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedJobState(connectionId);
-    }
-
-    public static class Message {
-        public JobId jobId;
-
-        public enum MessageKind {
-            JOB_START,
-            JOB_FINISH
-        }
-
-        public MessageKind messageKind;
-
-        public Message(JobId jobId, MessageKind msgKind) {
-            this.jobId = jobId;
-            this.messageKind = msgKind;
-        }
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeFailure(Set<String> deadNodeIds) {
-        Set<IClusterManagementWork> workToBeDone = new HashSet<IClusterManagementWork>();
-
-        Collection<FeedIntakeInfo> intakeInfos = feedJobNotificationHandler.getFeedIntakeInfos();
-        Collection<FeedConnectJobInfo> connectJobInfos = feedJobNotificationHandler.getFeedConnectInfos();
-
-        Map<String, List<FeedJobInfo>> impactedJobs = new HashMap<String, List<FeedJobInfo>>();
-
-        for (String deadNode : deadNodeIds) {
-            for (FeedIntakeInfo intakeInfo : intakeInfos) {
-                if (intakeInfo.getIntakeLocation().contains(deadNode)) {
-                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
-                    if (infos == null) {
-                        infos = new ArrayList<FeedJobInfo>();
-                        impactedJobs.put(deadNode, infos);
-                    }
-                    infos.add(intakeInfo);
-                    intakeInfo.setState(FeedJobState.UNDER_RECOVERY);
-                }
-            }
-
-            for (FeedConnectJobInfo connectInfo : connectJobInfos) {
-                if (connectInfo.getStorageLocations().contains(deadNode)) {
-                    continue;
-                }
-                if (connectInfo.getComputeLocations().contains(deadNode)
-                        || connectInfo.getCollectLocations().contains(deadNode)) {
-                    List<FeedJobInfo> infos = impactedJobs.get(deadNode);
-                    if (infos == null) {
-                        infos = new ArrayList<FeedJobInfo>();
-                        impactedJobs.put(deadNode, infos);
-                    }
-                    infos.add(connectInfo);
-                    connectInfo.setState(FeedJobState.UNDER_RECOVERY);
-                    feedJobNotificationHandler.deregisterFeedActivity(connectInfo);
-                }
-            }
-
-        }
-
-        if (impactedJobs.size() > 0) {
-            AddNodeWork addNodeWork = new AddNodeWork(deadNodeIds, deadNodeIds.size(), this);
-            feedWorkRequestResponseHandler.registerFeedWork(addNodeWork.getWorkId(), impactedJobs);
-            workToBeDone.add(addNodeWork);
-        }
-        return workToBeDone;
-
-    }
-
-    public static class FailureReport {
-
-        private final List<Pair<FeedConnectJobInfo, List<String>>> recoverableConnectJobs;
-        private final Map<IFeedJoint, List<String>> recoverableIntakeFeedIds;
-
-        public FailureReport(Map<IFeedJoint, List<String>> recoverableIntakeFeedIds,
-                List<Pair<FeedConnectJobInfo, List<String>>> recoverableSubscribers) {
-            this.recoverableConnectJobs = recoverableSubscribers;
-            this.recoverableIntakeFeedIds = recoverableIntakeFeedIds;
-        }
-
-        public List<Pair<FeedConnectJobInfo, List<String>>> getRecoverableSubscribers() {
-            return recoverableConnectJobs;
-        }
-
-        public Map<IFeedJoint, List<String>> getRecoverableIntakeFeedIds() {
-            return recoverableIntakeFeedIds;
-        }
-
-    }
-
-    @Override
-    public Set<IClusterManagementWork> notifyNodeJoin(String joinedNodeId) {
-        ClusterState newState = AsterixClusterProperties.INSTANCE.getState();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info(joinedNodeId + " joined the cluster. " + "Asterix state: " + newState);
-        }
-
-        boolean needToReActivateFeeds = !newState.equals(state) && (newState == ClusterState.ACTIVE);
-        if (needToReActivateFeeds) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info(joinedNodeId + " Resuming loser feeds (if any)");
-            }
-            try {
-                FeedsActivator activator = new FeedsActivator();
-                (new Thread(activator)).start();
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Exception in resuming feeds" + e.getMessage());
-                }
-            }
-            state = newState;
-        } else {
-            List<FeedCollectInfo> feedsThatCanBeRevived = new ArrayList<FeedCollectInfo>();
-            for (Entry<FeedCollectInfo, List<String>> entry : dependentFeeds.entrySet()) {
-                List<String> requiredNodeIds = entry.getValue();
-                if (requiredNodeIds.contains(joinedNodeId)) {
-                    requiredNodeIds.remove(joinedNodeId);
-                    if (requiredNodeIds.isEmpty()) {
-                        feedsThatCanBeRevived.add(entry.getKey());
-                    }
-                }
-            }
-            if (!feedsThatCanBeRevived.isEmpty()) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info(joinedNodeId + " Resuming feeds after rejoining of node " + joinedNodeId);
-                }
-                FeedsActivator activator = new FeedsActivator(feedsThatCanBeRevived);
-                (new Thread(activator)).start();
-            }
-        }
-        return null;
-    }
-
-    @Override
-    public void notifyRequestCompletion(IClusterManagementWorkResponse response) {
-        try {
-            responseInbox.put(response);
-        } catch (InterruptedException e) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("Interrupted exception");
-            }
-        }
-    }
-
-    @Override
-    public void notifyStateChange(ClusterState previousState, ClusterState newState) {
-        switch (newState) {
-            case ACTIVE:
-                if (previousState.equals(ClusterState.UNUSABLE)) {
-                    try {
-                        // TODO: Figure out why code was commented
-                        // FeedsActivator activator = new FeedsActivator();
-                        // (new Thread(activator)).start();
-                    } catch (Exception e) {
-                        if (LOGGER.isLoggable(Level.INFO)) {
-                            LOGGER.info("Exception in resuming feeds" + e.getMessage());
-                        }
-                    }
-                }
-                break;
-            default:
-                break;
-        }
-
-    }
-
-    public static class FeedsDeActivator implements Runnable {
-
-        private List<FeedConnectJobInfo> failedConnectjobs;
-
-        public FeedsDeActivator(List<FeedConnectJobInfo> failedConnectjobs) {
-            this.failedConnectjobs = failedConnectjobs;
-        }
-
-        @Override
-        public void run() {
-            for (FeedConnectJobInfo failedConnectJob : failedConnectjobs) {
-                endFeed(failedConnectJob);
-            }
-        }
-
-        private void endFeed(FeedConnectJobInfo cInfo) {
-            MetadataTransactionContext ctx = null;
-            PrintWriter writer = new PrintWriter(System.out, true);
-            SessionConfig pc = new SessionConfig(writer, OutputFormat.ADM);
-
-            try {
-                ctx = MetadataManager.INSTANCE.beginTransaction();
-                FeedId feedId = cInfo.getConnectionId().getFeedId();
-                DisconnectFeedStatement stmt = new DisconnectFeedStatement(new Identifier(feedId.getDataverse()),
-                        new Identifier(feedId.getFeedName()), new Identifier(cInfo.getConnectionId().getDatasetName()));
-                List<Statement> statements = new ArrayList<Statement>();
-                DataverseDecl dataverseDecl = new DataverseDecl(new Identifier(feedId.getDataverse()));
-                statements.add(dataverseDecl);
-                statements.add(stmt);
-                QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
-                translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
-                        QueryTranslator.ResultDelivery.SYNC);
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("End irrecoverable feed: " + cInfo.getConnectionId());
-                }
-                MetadataManager.INSTANCE.commitTransaction(ctx);
-            } catch (Exception e) {
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("Exception in ending loser feed: " + cInfo.getConnectionId() + " Exception "
-                            + e.getMessage());
-                }
-                e.printStackTrace();
-                try {
-                    MetadataManager.INSTANCE.abortTransaction(ctx);
-                } catch (Exception e2) {
-                    e2.addSuppressed(e);
-                    if (LOGGER.isLoggable(Level.SEVERE)) {
-                        LOGGER.severe("Exception in aborting transaction! System is in inconsistent state");
-                    }
-                }
-
-            }
-
-        }
-    }
-
-    public void submitFeedConnectionRequest(IFeedJoint feedPoint, FeedConnectionRequest subscriptionRequest)
-            throws Exception {
-        feedJobNotificationHandler.submitFeedConnectionRequest(feedPoint, subscriptionRequest);
-    }
-
-    @Override
-    public List<FeedConnectionId> getActiveFeedConnections(FeedId feedId) {
-        List<FeedConnectionId> connections = new ArrayList<FeedConnectionId>();
-        Collection<FeedConnectionId> activeConnections = feedJobNotificationHandler.getActiveFeedConnections();
-        if (feedId != null) {
-            for (FeedConnectionId connectionId : activeConnections) {
-                if (connectionId.getFeedId().equals(feedId)) {
-                    connections.add(connectionId);
-                }
-            }
-        } else {
-            connections.addAll(activeConnections);
-        }
-        return connections;
-    }
-
-    @Override
-    public List<String> getComputeLocations(FeedId feedId) {
-        return feedJobNotificationHandler.getFeedComputeLocations(feedId);
-    }
-
-    @Override
-    public List<String> getIntakeLocations(FeedId feedId) {
-        return feedJobNotificationHandler.getFeedIntakeLocations(feedId);
-    }
-
-    @Override
-    public List<String> getStoreLocations(FeedConnectionId feedConnectionId) {
-        return feedJobNotificationHandler.getFeedStorageLocations(feedConnectionId);
-    }
-
-    @Override
-    public List<String> getCollectLocations(FeedConnectionId feedConnectionId) {
-        return feedJobNotificationHandler.getFeedCollectLocations(feedConnectionId);
-    }
-
-    @Override
-    public boolean isFeedConnectionActive(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.isFeedConnectionActive(connectionId);
-    }
-
-    public void reportPartialDisconnection(FeedConnectionId connectionId) {
-        feedJobNotificationHandler.removeFeedJointsPostPipelineTermination(connectionId);
-    }
-
-    public void registerFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
-        feedReportQueue.put(feedId, queue);
-    }
-
-    public void deregisterFeedReportQueue(FeedConnectionId feedId, LinkedBlockingQueue<String> queue) {
-        feedReportQueue.remove(feedId);
-    }
-
-    public LinkedBlockingQueue<String> getFeedReportQueue(FeedConnectionId feedId) {
-        return feedReportQueue.get(feedId);
-    }
-
-    @Override
-    public IFeedJoint getAvailableFeedJoint(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.getAvailableFeedJoint(feedJointKey);
-    }
-
-    @Override
-    public boolean isFeedJointAvailable(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.isFeedPointAvailable(feedJointKey);
-    }
-
-    public void registerFeedJoint(IFeedJoint feedJoint) {
-        feedJobNotificationHandler.registerFeedJoint(feedJoint);
-    }
-
-    public IFeedJoint getFeedJoint(FeedJointKey feedJointKey) {
-        return feedJobNotificationHandler.getFeedJoint(feedJointKey);
-    }
-
-    @Override
-    public void registerFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        feedJobNotificationHandler.registerFeedEventSubscriber(connectionId, subscriber);
-    }
-
-    @Override
-    public void deregisterFeedEventSubscriber(FeedConnectionId connectionId, IFeedLifecycleEventSubscriber subscriber) {
-        feedJobNotificationHandler.deregisterFeedEventSubscriber(connectionId, subscriber);
-
-    }
-
-    public JobSpecification getCollectJobSpecification(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getCollectJobSpecification(connectionId);
-    }
-
-    public JobId getFeedCollectJobId(FeedConnectionId connectionId) {
-        return feedJobNotificationHandler.getFeedCollectJobId(connectionId);
-    }
-
-}
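
The class removed above is built around one pattern: the job-lifecycle callbacks (notifyJobStart/notifyJobFinish) only enqueue small Message objects into a LinkedBlockingQueue, and a dedicated handler thread drains that inbox and does the longer-running bookkeeping. Below is a minimal, self-contained sketch of that inbox/handler pattern; the class, method, and field names are illustrative stand-ins, not AsterixDB APIs.

    // Minimal sketch of a job-event inbox drained by one handler thread.
    // Everything here is hypothetical; only the overall shape mirrors the
    // deleted FeedLifecycleListener/FeedJobNotificationHandler pair.
    import java.util.concurrent.LinkedBlockingQueue;

    public class JobEventInboxSketch {

        enum Kind { JOB_START, JOB_FINISH }

        // Stand-in for the Message(jobId, kind) pairs queued by the listener.
        static final class Event {
            final long jobId;
            final Kind kind;
            Event(long jobId, Kind kind) { this.jobId = jobId; this.kind = kind; }
        }

        private final LinkedBlockingQueue<Event> inbox = new LinkedBlockingQueue<>();

        // Producers (the job-lifecycle callbacks) only enqueue and return quickly.
        public void notifyJobStart(long jobId)  { inbox.add(new Event(jobId, Kind.JOB_START)); }
        public void notifyJobFinish(long jobId) { inbox.add(new Event(jobId, Kind.JOB_FINISH)); }

        // A single consumer thread takes events off the inbox and reacts to them.
        public void startHandler() {
            Thread handler = new Thread(() -> {
                try {
                    while (!Thread.currentThread().isInterrupted()) {
                        Event e = inbox.take();
                        System.out.println("handling " + e.kind + " for job " + e.jobId);
                    }
                } catch (InterruptedException ie) {
                    Thread.currentThread().interrupt(); // exit quietly on shutdown
                }
            }, "job-event-handler");
            handler.setDaemon(true);
            handler.start();
        }

        public static void main(String[] args) throws Exception {
            JobEventInboxSketch sketch = new JobEventInboxSketch();
            sketch.startHandler();
            sketch.notifyJobStart(1);
            sketch.notifyJobFinish(1);
            Thread.sleep(100); // give the handler a moment before the JVM exits
        }
    }

The queue keeps the callback path short: notifyJobStart/notifyJobFinish only enqueue, while the heavier work is serialized on the handler thread.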

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java
deleted file mode 100644
index 18e885d..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedLoadManager.java
+++ /dev/null
@@ -1,302 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.util.ArrayList;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.TreeSet;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.feed.api.IFeedLoadManager;
-import org.apache.asterix.external.feed.api.IFeedTrackingManager;
-import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.external.feed.management.FeedConnectionId;
-import org.apache.asterix.external.feed.message.FeedCongestionMessage;
-import org.apache.asterix.external.feed.message.FeedReportMessage;
-import org.apache.asterix.external.feed.message.PrepareStallMessage;
-import org.apache.asterix.external.feed.message.ScaleInReportMessage;
-import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
-import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
-import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
-import org.apache.asterix.external.feed.watch.FeedActivity;
-import org.apache.asterix.external.feed.watch.NodeLoadReport;
-import org.apache.asterix.external.feed.watch.FeedJobInfo.FeedJobState;
-import org.apache.asterix.file.FeedOperations;
-import org.apache.asterix.metadata.feeds.FeedMetadataUtil;
-import org.apache.asterix.om.util.AsterixAppContextInfo;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.api.client.IHyracksClientConnection;
-import org.apache.hyracks.api.job.JobId;
-import org.apache.hyracks.api.job.JobSpecification;
-
-public class FeedLoadManager implements IFeedLoadManager {
-
-    private static final Logger LOGGER = Logger.getLogger(FeedLoadManager.class.getName());
-
-    private static final long MIN_MODIFICATION_INTERVAL = 180000; // 3 minutes
-    private final TreeSet<NodeLoadReport> nodeReports;
-    private final Map<FeedConnectionId, FeedActivity> feedActivities;
-    private final Map<String, Pair<Integer, Integer>> feedMetrics;
-
-    private FeedConnectionId lastModified;
-    private long lastModifiedTimestamp;
-
-    private static final int UNKNOWN = -1;
-
-    public FeedLoadManager() {
-        this.nodeReports = new TreeSet<NodeLoadReport>();
-        this.feedActivities = new HashMap<FeedConnectionId, FeedActivity>();
-        this.feedMetrics = new HashMap<String, Pair<Integer, Integer>>();
-    }
-
-    @Override
-    public void submitNodeLoadReport(NodeLoadReport report) {
-        nodeReports.remove(report);
-        nodeReports.add(report);
-    }
-
-    @Override
-    public void reportCongestion(FeedCongestionMessage message) throws AsterixException {
-        FeedRuntimeId runtimeId = message.getRuntimeId();
-        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
-        if (jobState == null
-                || (jobState.equals(FeedJobState.UNDER_RECOVERY))
-                || (message.getConnectionId().equals(lastModified) && System.currentTimeMillis()
-                        - lastModifiedTimestamp < MIN_MODIFICATION_INTERVAL)) {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Ignoring congestion report from " + runtimeId);
-            }
-            return;
-        } else {
-            try {
-                FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
-                int inflowRate = message.getInflowRate();
-                int outflowRate = message.getOutflowRate();
-                List<String> currentComputeLocations = new ArrayList<String>();
-                currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message
-                        .getConnectionId().getFeedId()));
-                int computeCardinality = currentComputeLocations.size();
-                int requiredCardinality = (int) Math
-                        .ceil((double) ((computeCardinality * inflowRate) / (double) outflowRate)) + 5;
-                int additionalComputeNodes = requiredCardinality - computeCardinality;
-                if (LOGGER.isLoggable(Level.WARNING)) {
-                    LOGGER.warning("INCREASING COMPUTE CARDINALITY from " + computeCardinality + " by "
-                            + additionalComputeNodes);
-                }
-
-                List<String> helperComputeNodes = getNodeForSubstitution(additionalComputeNodes);
-
-                // Step 1) Alter the original feed job to adjust the cardinality
-                JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
-                        .getConnectionId());
-                helperComputeNodes.addAll(currentComputeLocations);
-                List<String> newLocations = new ArrayList<String>();
-                newLocations.addAll(currentComputeLocations);
-                newLocations.addAll(helperComputeNodes);
-                FeedMetadataUtil.increaseCardinality(jobSpec, FeedRuntimeType.COMPUTE, requiredCardinality, newLocations);
-
-                // Step 2) send prepare to  stall message
-                gracefullyTerminateDataFlow(message.getConnectionId(), Integer.MAX_VALUE);
-
-                // Step 3) run the altered job specification
-                if (LOGGER.isLoggable(Level.INFO)) {
-                    LOGGER.info("New Job after adjusting to the workload " + jobSpec);
-                }
-
-                Thread.sleep(10000);
-                runJob(jobSpec, false);
-                lastModified = message.getConnectionId();
-                lastModifiedTimestamp = System.currentTimeMillis();
-
-            } catch (Exception e) {
-                e.printStackTrace();
-                if (LOGGER.isLoggable(Level.SEVERE)) {
-                    LOGGER.severe("Unable to form the required job for scaling in/out" + e.getMessage());
-                }
-                throw new AsterixException(e);
-            }
-        }
-    }
-
-    @Override
-    public void submitScaleInPossibleReport(ScaleInReportMessage message) throws Exception {
-        FeedJobState jobState = FeedLifecycleListener.INSTANCE.getFeedJobState(message.getConnectionId());
-        if (jobState == null || (jobState.equals(FeedJobState.UNDER_RECOVERY))) {
-            if (LOGGER.isLoggable(Level.WARNING)) {
-                LOGGER.warning("JobState information for job " + "[" + message.getConnectionId() + "]" + " not found ");
-            }
-            return;
-        } else {
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Processing scale-in message " + message);
-            }
-            FeedLifecycleListener.INSTANCE.setJobState(message.getConnectionId(), FeedJobState.UNDER_RECOVERY);
-            JobSpecification jobSpec = FeedLifecycleListener.INSTANCE.getCollectJobSpecification(message
-                    .getConnectionId());
-            int reducedCardinality = message.getReducedCardinaliy();
-            List<String> currentComputeLocations = new ArrayList<String>();
-            currentComputeLocations.addAll(FeedLifecycleListener.INSTANCE.getComputeLocations(message.getConnectionId()
-                    .getFeedId()));
-            FeedMetadataUtil.decreaseComputeCardinality(jobSpec, FeedRuntimeType.COMPUTE, reducedCardinality,
-                    currentComputeLocations);
-
-            gracefullyTerminateDataFlow(message.getConnectionId(), reducedCardinality - 1);
-            Thread.sleep(3000);
-            JobId newJobId = runJob(jobSpec, false);
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Launch modified job" + "[" + newJobId + "]" + "for scale-in \n" + jobSpec);
-            }
-
-        }
-    }
-
-    private void gracefullyTerminateDataFlow(FeedConnectionId connectionId, int computePartitionRetainLimit)
-            throws Exception {
-        // Step 1) send prepare to  stall message
-        PrepareStallMessage stallMessage = new PrepareStallMessage(connectionId, computePartitionRetainLimit);
-        List<String> intakeLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-        List<String> computeLocations = FeedLifecycleListener.INSTANCE.getComputeLocations(connectionId.getFeedId());
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-
-        Set<String> operatorLocations = new HashSet<String>();
-
-        operatorLocations.addAll(intakeLocations);
-        operatorLocations.addAll(computeLocations);
-        operatorLocations.addAll(storageLocations);
-
-        JobSpecification messageJobSpec = FeedOperations.buildPrepareStallMessageJob(stallMessage, operatorLocations);
-        runJob(messageJobSpec, true);
-
-        // Step 2)
-        TerminateDataFlowMessage terminateMesg = new TerminateDataFlowMessage(connectionId);
-        messageJobSpec = FeedOperations.buildTerminateFlowMessageJob(terminateMesg, intakeLocations);
-        runJob(messageJobSpec, true);
-    }
-
-    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
-        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
-        JobId jobId = hcc.startJob(spec);
-        if (waitForCompletion) {
-            hcc.waitForCompletion(jobId);
-        }
-        return jobId;
-    }
-
-    @Override
-    public void submitFeedRuntimeReport(FeedReportMessage report) {
-        String key = "" + report.getConnectionId() + ":" + report.getRuntimeId().getFeedRuntimeType();
-        Pair<Integer, Integer> value = feedMetrics.get(key);
-        if (value == null) {
-            value = new Pair<Integer, Integer>(report.getValue(), 1);
-            feedMetrics.put(key, value);
-        } else {
-            value.first = value.first + report.getValue();
-            value.second = value.second + 1;
-        }
-    }
-
-    @Override
-    public int getOutflowRate(FeedConnectionId connectionId, FeedRuntimeType runtimeType) {
-        int rVal;
-        String key = "" + connectionId + ":" + runtimeType;
-        feedMetrics.get(key);
-        Pair<Integer, Integer> value = feedMetrics.get(key);
-        if (value == null) {
-            rVal = UNKNOWN;
-        } else {
-            rVal = value.first / value.second;
-        }
-        return rVal;
-    }
-
-    private List<String> getNodeForSubstitution(int nRequired) {
-        List<String> nodeIds = new ArrayList<String>();
-        Iterator<NodeLoadReport> it = null;
-        int nAdded = 0;
-        while (nAdded < nRequired) {
-            it = nodeReports.iterator();
-            while (it.hasNext()) {
-                nodeIds.add(it.next().getNodeId());
-                nAdded++;
-            }
-        }
-        return nodeIds;
-    }
-
-    @Override
-    public synchronized List<String> getNodes(int required) {
-        Iterator<NodeLoadReport> it;
-        List<String> allocated = new ArrayList<String>();
-        while (allocated.size() < required) {
-            it = nodeReports.iterator();
-            while (it.hasNext() && allocated.size() < required) {
-                allocated.add(it.next().getNodeId());
-            }
-        }
-        return allocated;
-    }
-
-    @Override
-    public void reportThrottlingEnabled(ThrottlingEnabledFeedMessage mesg) throws AsterixException, Exception {
-        System.out.println("Throttling Enabled for " + mesg.getConnectionId() + " " + mesg.getFeedRuntimeId());
-        FeedConnectionId connectionId = mesg.getConnectionId();
-        List<String> destinationLocations = new ArrayList<String>();
-        List<String> storageLocations = FeedLifecycleListener.INSTANCE.getStoreLocations(connectionId);
-        List<String> collectLocations = FeedLifecycleListener.INSTANCE.getCollectLocations(connectionId);
-
-        destinationLocations.addAll(storageLocations);
-        destinationLocations.addAll(collectLocations);
-        JobSpecification messageJobSpec = FeedOperations.buildNotifyThrottlingEnabledMessageJob(mesg,
-                destinationLocations);
-        runJob(messageJobSpec, true);
-        if (LOGGER.isLoggable(Level.WARNING)) {
-            LOGGER.warning("Acking disabled for " + mesg.getConnectionId() + " in view of activated throttling");
-        }
-        IFeedTrackingManager trackingManager = CentralFeedManager.getInstance().getFeedTrackingManager();
-        trackingManager.disableAcking(connectionId);
-    }
-
-    @Override
-    public void reportFeedActivity(FeedConnectionId connectionId, FeedActivity activity) {
-        feedActivities.put(connectionId, activity);
-    }
-
-    @Override
-    public FeedActivity getFeedActivity(FeedConnectionId connectionId) {
-        return feedActivities.get(connectionId);
-    }
-
-    @Override
-    public Collection<FeedActivity> getFeedActivities() {
-        return feedActivities.values();
-    }
-
-    @Override
-    public void removeFeedActivity(FeedConnectionId connectionId) {
-        feedActivities.remove(connectionId);
-    }
-}
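
The congestion handler removed above sizes the rebuilt compute stage with a simple ratio: requiredCardinality = ceil(computeCardinality * inflowRate / outflowRate) + 5, and the difference to the current cardinality is requested as helper nodes before the job is resubmitted. A small, self-contained sketch of that arithmetic with illustrative numbers follows (the class name and figures are hypothetical; only the formula comes from the deleted code).

    // Sketch of the scale-out arithmetic in reportCongestion(): grow the
    // compute stage so that, at the observed per-node throughput, it can keep
    // up with the intake rate, plus a fixed headroom of 5 nodes.
    public class ScaleOutMath {
        static int requiredCardinality(int computeCardinality, int inflowRate, int outflowRate) {
            return (int) Math.ceil((computeCardinality * inflowRate) / (double) outflowRate) + 5;
        }

        public static void main(String[] args) {
            int current = 4;     // compute nodes currently allocated (illustrative)
            int inflow = 3000;   // records/sec arriving from intake (illustrative)
            int outflow = 1000;  // records/sec the compute stage currently drains (illustrative)
            int required = requiredCardinality(current, inflow, outflow); // ceil(12000/1000) + 5 = 17
            System.out.println("add " + (required - current) + " compute nodes"); // prints: add 13 compute nodes
        }
    }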

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java b/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java
deleted file mode 100644
index 4ae2e59..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/feed/FeedMessageReceiver.java
+++ /dev/null
@@ -1,96 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.feed;
-
-import java.util.logging.Level;
-
-import org.apache.asterix.external.feed.api.IFeedLoadManager;
-import org.apache.asterix.external.feed.api.IFeedTrackingManager;
-import org.apache.asterix.external.feed.api.IFeedMessage.MessageType;
-import org.apache.asterix.external.feed.message.FeedCongestionMessage;
-import org.apache.asterix.external.feed.message.FeedReportMessage;
-import org.apache.asterix.external.feed.message.FeedTupleCommitAckMessage;
-import org.apache.asterix.external.feed.message.MessageReceiver;
-import org.apache.asterix.external.feed.message.ScaleInReportMessage;
-import org.apache.asterix.external.feed.message.StorageReportFeedMessage;
-import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
-import org.apache.asterix.external.feed.watch.NodeLoadReport;
-import org.apache.asterix.external.util.FeedConstants;
-import org.apache.asterix.feed.CentralFeedManager.AQLExecutor;
-import org.apache.asterix.hyracks.bootstrap.FeedBootstrap;
-import org.apache.hyracks.api.exceptions.HyracksDataException;
-import org.json.JSONObject;
-
-public class FeedMessageReceiver extends MessageReceiver<String> {
-
-    private static boolean initialized;
-
-    private final IFeedLoadManager feedLoadManager;
-    private final IFeedTrackingManager feedTrackingManager;
-
-    public FeedMessageReceiver(CentralFeedManager centralFeedManager) {
-        this.feedLoadManager = centralFeedManager.getFeedLoadManager();
-        this.feedTrackingManager = centralFeedManager.getFeedTrackingManager();
-    }
-
-    @Override
-    public void processMessage(String message) throws Exception {
-        JSONObject obj = new JSONObject(message);
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Received message " + obj);
-        }
-        MessageType messageType = MessageType.valueOf(obj.getString(FeedConstants.MessageConstants.MESSAGE_TYPE));
-        switch (messageType) {
-            case XAQL:
-                if (!initialized) {
-                    FeedBootstrap.setUpInitialArtifacts();
-                    initialized = true;
-                }
-                AQLExecutor.executeAQL(obj.getString(FeedConstants.MessageConstants.AQL));
-                break;
-            case CONGESTION:
-                feedLoadManager.reportCongestion(FeedCongestionMessage.read(obj));
-                break;
-            case FEED_REPORT:
-                feedLoadManager.submitFeedRuntimeReport(FeedReportMessage.read(obj));
-                break;
-            case NODE_REPORT:
-                feedLoadManager.submitNodeLoadReport(NodeLoadReport.read(obj));
-                break;
-            case SCALE_IN_REQUEST:
-                feedLoadManager.submitScaleInPossibleReport(ScaleInReportMessage.read(obj));
-                break;
-            case STORAGE_REPORT:
-                FeedLifecycleListener.INSTANCE.updateTrackingInformation(StorageReportFeedMessage.read(obj));
-                break;
-            case COMMIT_ACK:
-                feedTrackingManager.submitAckReport(FeedTupleCommitAckMessage.read(obj));
-                break;
-            case THROTTLING_ENABLED:
-                feedLoadManager.reportThrottlingEnabled(ThrottlingEnabledFeedMessage.read(obj));
-            default:
-                break;
-        }
-
-    }
-
-    @Override
-    public void emptyInbox() throws HyracksDataException {
-    }
-}
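
The receiver removed above uses a plain envelope-dispatch scheme: every message arrives as a JSON string, its message-type field is mapped to a MessageType constant, and a switch hands the parsed object to the matching manager. A minimal, self-contained sketch of that dispatch style follows; the field name, enum subset, and handlers are illustrative stand-ins, and only the use of org.json.JSONObject mirrors the deleted code.

    // Sketch of JSON envelope dispatch in the style of FeedMessageReceiver.
    import org.json.JSONObject;

    public class MessageDispatchSketch {
        enum MessageType { NODE_REPORT, FEED_REPORT }  // illustrative subset

        static void process(String raw) {
            JSONObject obj = new JSONObject(raw);
            // "message-type" is an illustrative field name; the real constant
            // lives in FeedConstants.MessageConstants.
            MessageType type = MessageType.valueOf(obj.getString("message-type"));
            switch (type) {
                case NODE_REPORT:
                    System.out.println("node load report: " + obj);
                    break;
                case FEED_REPORT:
                    System.out.println("feed runtime report: " + obj);
                    break;
                default:
                    break;
            }
        }

        public static void main(String[] args) {
            process("{\"message-type\": \"NODE_REPORT\", \"load\": 0.42}");
        }
    }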


[06/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
new file mode 100644
index 0000000..4303442
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/parser/test/ADMDataParserTest.java
@@ -0,0 +1,116 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.parser.test;
+
+import java.io.ByteArrayOutputStream;
+import java.io.DataOutput;
+import java.io.DataOutputStream;
+import java.util.concurrent.atomic.AtomicInteger;
+
+import org.apache.asterix.external.parser.ADMDataParser;
+import org.apache.asterix.om.base.AMutableDate;
+import org.apache.asterix.om.base.AMutableDateTime;
+import org.apache.asterix.om.base.AMutableTime;
+import org.junit.Assert;
+import org.junit.Test;
+
+import junit.extensions.PA;
+
+public class ADMDataParserTest {
+
+    @Test
+    public void test() {
+        String[] dates = { "-9537-08-04", "9656-06-03", "-9537-04-04", "9656-06-04", "-9537-10-04", "9626-09-05" };
+        AMutableDate[] parsedDates = new AMutableDate[] { new AMutableDate(-4202630), new AMutableDate(2807408),
+                new AMutableDate(-4202752), new AMutableDate(2807409), new AMutableDate(-4202569),
+                new AMutableDate(2796544), };
+
+        String[] times = { "12:04:45.689Z", "12:41:59.002Z", "12:10:45.169Z", "15:37:48.736Z", "04:16:42.321Z",
+                "12:22:56.816Z" };
+        AMutableTime[] parsedTimes = new AMutableTime[] { new AMutableTime(43485689), new AMutableTime(45719002),
+                new AMutableTime(43845169), new AMutableTime(56268736), new AMutableTime(15402321),
+                new AMutableTime(44576816), };
+
+        String[] dateTimes = { "-2640-10-11T17:32:15.675Z", "4104-02-01T05:59:11.902Z", "0534-12-08T08:20:31.487Z",
+                "6778-02-16T22:40:21.653Z", "2129-12-12T13:18:35.758Z", "8647-07-01T13:10:19.691Z" };
+        AMutableDateTime[] parsedDateTimes = new AMutableDateTime[] { new AMutableDateTime(-145452954464325L),
+                new AMutableDateTime(67345192751902L), new AMutableDateTime(-45286270768513L),
+                new AMutableDateTime(151729886421653L), new AMutableDateTime(5047449515758L),
+                new AMutableDateTime(210721439419691L) };
+
+        Thread[] threads = new Thread[16];
+        AtomicInteger errorCount = new AtomicInteger(0);
+        for (int i = 0; i < threads.length; ++i) {
+            threads[i] = new Thread(new Runnable() {
+                ADMDataParser parser = new ADMDataParser();
+                ByteArrayOutputStream bos = new ByteArrayOutputStream();
+                DataOutput dos = new DataOutputStream(bos);
+
+                @Override
+                public void run() {
+                    try {
+                        int round = 0;
+                        while (round++ < 10000) {
+                            // Test parseDate.
+                            for (int index = 0; index < dates.length; ++index) {
+                                PA.invokeMethod(parser, "parseDate(java.lang.String, java.io.DataOutput)",
+                                        dates[index], dos);
+                                AMutableDate aDate = (AMutableDate) PA.getValue(parser, "aDate");
+                                Assert.assertTrue(aDate.equals(parsedDates[index]));
+                            }
+
+                            // Tests parseTime.
+                            for (int index = 0; index < times.length; ++index) {
+                                PA.invokeMethod(parser, "parseTime(java.lang.String, java.io.DataOutput)",
+                                        times[index], dos);
+                                AMutableTime aTime = (AMutableTime) PA.getValue(parser, "aTime");
+                                Assert.assertTrue(aTime.equals(parsedTimes[index]));
+                            }
+
+                            // Tests parseDateTime.
+                            for (int index = 0; index < dateTimes.length; ++index) {
+                                PA.invokeMethod(parser, "parseDateTime(java.lang.String, java.io.DataOutput)",
+                                        dateTimes[index], dos);
+                                AMutableDateTime aDateTime = (AMutableDateTime) PA.getValue(parser, "aDateTime");
+                                Assert.assertTrue(aDateTime.equals(parsedDateTimes[index]));
+                            }
+                        }
+                    } catch (Exception e) {
+                        errorCount.incrementAndGet();
+                        e.printStackTrace();
+                    }
+                }
+            });
+            // Kicks off test threads.
+            threads[i].start();
+        }
+
+        // Joins all the threads.
+        try {
+            for (int i = 0; i < threads.length; ++i) {
+                threads[i].join();
+            }
+        } catch (InterruptedException e) {
+            throw new IllegalStateException(e);
+        }
+        // Asserts no failure.
+        Assert.assertTrue(errorCount.get() == 0);
+    }
+
+}
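
The test added above reaches the parser's private members through the PA reflection helper (junit.extensions.PA): PA.invokeMethod calls a private parse method by its string signature, and PA.getValue then reads the private aDate/aTime/aDateTime fields that the call filled in. The sketch below shows the equivalent plain-reflection steps on a hypothetical target class; Target and its members are illustrative, not the real ADMDataParser.

    // Sketch of what the PA calls amount to: reflective access to a private
    // method and a private field of a hypothetical Target class.
    import java.lang.reflect.Field;
    import java.lang.reflect.Method;

    public class PrivateAccessSketch {
        static class Target {
            private int last;                            // illustrative private state
            private void parse(String s) { last = s.length(); }
        }

        public static void main(String[] args) throws Exception {
            Target t = new Target();
            Method m = Target.class.getDeclaredMethod("parse", String.class);
            m.setAccessible(true);                       // bypass the private modifier
            m.invoke(t, "2016-02-22");

            Field f = Target.class.getDeclaredField("last");
            f.setAccessible(true);
            System.out.println("last = " + f.get(t));    // prints: last = 10
        }
    }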

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/runtime/operator/file/ADMDataParserTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/runtime/operator/file/ADMDataParserTest.java b/asterix-external-data/src/test/java/org/apache/asterix/runtime/operator/file/ADMDataParserTest.java
deleted file mode 100644
index c6939c9..0000000
--- a/asterix-external-data/src/test/java/org/apache/asterix/runtime/operator/file/ADMDataParserTest.java
+++ /dev/null
@@ -1,116 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.runtime.operator.file;
-
-import java.io.ByteArrayOutputStream;
-import java.io.DataOutput;
-import java.io.DataOutputStream;
-import java.util.concurrent.atomic.AtomicInteger;
-
-import org.apache.asterix.external.parser.ADMDataParser;
-import org.apache.asterix.om.base.AMutableDate;
-import org.apache.asterix.om.base.AMutableDateTime;
-import org.apache.asterix.om.base.AMutableTime;
-import org.junit.Assert;
-import org.junit.Test;
-
-import junit.extensions.PA;
-
-public class ADMDataParserTest {
-
-    @Test
-    public void test() {
-        String[] dates = { "-9537-08-04", "9656-06-03", "-9537-04-04", "9656-06-04", "-9537-10-04", "9626-09-05" };
-        AMutableDate[] parsedDates = new AMutableDate[] { new AMutableDate(-4202630), new AMutableDate(2807408),
-                new AMutableDate(-4202752), new AMutableDate(2807409), new AMutableDate(-4202569),
-                new AMutableDate(2796544), };
-
-        String[] times = { "12:04:45.689Z", "12:41:59.002Z", "12:10:45.169Z", "15:37:48.736Z", "04:16:42.321Z",
-                "12:22:56.816Z" };
-        AMutableTime[] parsedTimes = new AMutableTime[] { new AMutableTime(43485689), new AMutableTime(45719002),
-                new AMutableTime(43845169), new AMutableTime(56268736), new AMutableTime(15402321),
-                new AMutableTime(44576816), };
-
-        String[] dateTimes = { "-2640-10-11T17:32:15.675Z", "4104-02-01T05:59:11.902Z", "0534-12-08T08:20:31.487Z",
-                "6778-02-16T22:40:21.653Z", "2129-12-12T13:18:35.758Z", "8647-07-01T13:10:19.691Z" };
-        AMutableDateTime[] parsedDateTimes = new AMutableDateTime[] { new AMutableDateTime(-145452954464325L),
-                new AMutableDateTime(67345192751902L), new AMutableDateTime(-45286270768513L),
-                new AMutableDateTime(151729886421653L), new AMutableDateTime(5047449515758L),
-                new AMutableDateTime(210721439419691L) };
-
-        Thread[] threads = new Thread[16];
-        AtomicInteger errorCount = new AtomicInteger(0);
-        for (int i = 0; i < threads.length; ++i) {
-            threads[i] = new Thread(new Runnable() {
-                ADMDataParser parser = new ADMDataParser();
-                ByteArrayOutputStream bos = new ByteArrayOutputStream();
-                DataOutput dos = new DataOutputStream(bos);
-
-                @Override
-                public void run() {
-                    try {
-                        int round = 0;
-                        while (round++ < 10000) {
-                            // Test parseDate.
-                            for (int index = 0; index < dates.length; ++index) {
-                                PA.invokeMethod(parser, "parseDate(java.lang.String, java.io.DataOutput)",
-                                        dates[index], dos);
-                                AMutableDate aDate = (AMutableDate) PA.getValue(parser, "aDate");
-                                Assert.assertTrue(aDate.equals(parsedDates[index]));
-                            }
-
-                            // Tests parseTime.
-                            for (int index = 0; index < times.length; ++index) {
-                                PA.invokeMethod(parser, "parseTime(java.lang.String, java.io.DataOutput)",
-                                        times[index], dos);
-                                AMutableTime aTime = (AMutableTime) PA.getValue(parser, "aTime");
-                                Assert.assertTrue(aTime.equals(parsedTimes[index]));
-                            }
-
-                            // Tests parseDateTime.
-                            for (int index = 0; index < dateTimes.length; ++index) {
-                                PA.invokeMethod(parser, "parseDateTime(java.lang.String, java.io.DataOutput)",
-                                        dateTimes[index], dos);
-                                AMutableDateTime aDateTime = (AMutableDateTime) PA.getValue(parser, "aDateTime");
-                                Assert.assertTrue(aDateTime.equals(parsedDateTimes[index]));
-                            }
-                        }
-                    } catch (Exception e) {
-                        errorCount.incrementAndGet();
-                        e.printStackTrace();
-                    }
-                }
-            });
-            // Kicks off test threads.
-            threads[i].start();
-        }
-
-        // Joins all the threads.
-        try {
-            for (int i = 0; i < threads.length; ++i) {
-                threads[i].join();
-            }
-        } catch (InterruptedException e) {
-            throw new IllegalStateException(e);
-        }
-        // Asserts no failure.
-        Assert.assertTrue(errorCount.get() == 0);
-    }
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/resources/August16-20-long.txt
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/August16-20-long.txt b/asterix-external-data/src/test/resources/August16-20-long.txt
new file mode 100644
index 0000000..7a1abd7
--- /dev/null
+++ b/asterix-external-data/src/test/resources/August16-20-long.txt
@@ -0,0 +1,1106 @@
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5616@cms"
+JobFinishedHookDone = 1439847319
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 25
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = { "combine_output.tar" }
+ProcId = 0
+CRAB_UserGroup = "dcms"
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439847319
+CRAB_SiteWhitelist = {  }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.out;_condor_stderr=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/73/uscms5616/150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/C=DE/O=GermanGrid/OU=KIT/CN=Andrew Gilbert"
+ClusterId = 1217455
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T2_CH_CERN"
+CompletionDate = 1439847319
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5616"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "agilbert"
+CommittedTime = 0
+X509UserProxy = "63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+QDate = 1439764883
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439764892
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1217455.0#1439764883"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc491"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 82427.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = { "T2_FR_CCIN2P3","T1_IT_CNAF","T1_ES_PIC","T1_UK_RAL","T2_FI_HIP","T2_US_Nebraska" }
+DAG_NodesQueued = 0
+CRAB_JobCount = 25
+JobStartDate = 1439764892
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"splitOnRun\": false, \"events_per_job\": 1}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439764886
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = {  }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/7455/0/cluster1217455.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "prefit_cms_asimov_A1_5D-59ffde2b5d41be5f0c401d0a6a8a0194"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CRAB_JobSW = "CMSSW_7_4_0_pre9"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 82427.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 25
+CRAB_InputData = "/MinBias"
+SUBMIT_x509userproxy = "/data/certs/creds/63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+StreamOut = false
+CRAB_ReqName = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 0
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439764891
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5050@cms"
+JobFinishedHookDone = 1439773907
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 30
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = {  }
+ProcId = 0
+CRAB_UserGroup = undefined
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439773907
+CRAB_SiteWhitelist = { "T3_US_FNALLPC","T2_US_Purdue","T2_US_Nebraska" }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.out;_condor_stderr=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/67/uscms5050/150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ferencek/CN=650164/CN=Dinko Ferencek"
+ClusterId = 1206367
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T3_US_FNALLPC"
+CompletionDate = 1439773907
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+x509userproxyexpiration = 1440294044
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5050"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "ferencek"
+CommittedTime = 0
+X509UserProxy = "3a7798796bc24a800001338917ec45991bcf0a96"
+QDate = 1439615565
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439615574
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1206367.0#1439615565"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc481"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 158333.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = {  }
+DAG_NodesQueued = 0
+CRAB_JobCount = 30
+JobStartDate = 1439615574
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"lheInputFiles\": true, \"splitOnRun\": false, \"events_per_job\": 50000}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439615569
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = { "Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE.root" }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/6367/0/cluster1206367.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "LHE-17521057f93ed9cadf21dd45b3505145"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CRAB_JobSW = "CMSSW_7_1_18"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 158333.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 30
+CRAB_InputData = "/Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8"
+SUBMIT_x509userproxy = "/data/certs/creds/3a7798796bc24a800001338917ec45991bcf0a96"
+StreamOut = false
+CRAB_ReqName = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 1
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439615572
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 2800
+StatsLifetimeStarter = 165949
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "grid_cms"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SubmitEventNotes = "DAG Node: Job53"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+DAGParentNodeNames = ""
+MATCH_GLIDEIN_Site = "CERN"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 163084.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "59069"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+RecentBlockWrites = 0
+CurrentHosts = 0
+MATCH_GLIDEIN_ProcId = 1
+x509UserProxyExpiration = 1440397268
+Iwd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 75000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/128.142.45.103"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "689255460"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job53"
+LastPublicClaimId = "<128.142.45.103:55332>#1439963327#3#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_CH_CERN"
+RemoteSysCpu = 1963.0
+CRAB_Retry = 2
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1238992
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2800"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms5111"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+LastJobLeaseRenewal = 1440131524
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_CH_CERN"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.main"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.kbutanov"
+MATCH_GLIDEIN_SiteWMS_Slot = "Unknown"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms5111@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2800
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440131525
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 1
+MATCH_GLIDEIN_Factory = "gfactory_service"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1233705.0#1439964847"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 59069
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1439965573
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/8123da6528ec4abd24562a99b4f2b0ec556bed0b"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440530096"
+MATCH_EXP_JOB_GLIDEIN_Factory = "gfactory_service"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.main"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 2128005.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=53 --cmsswVersion=CMSSW_7_4_7 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v2/50000/6E2F932B-633B-E511-A7AE-F04DA23BCE4C.root\"] --runAndLumis=job_lumis_53.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.53"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 165965.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_CH_CERN"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 53
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2800"
+MATCH_GLIDEIN_ToRetire = 1440530096
+ImageSize = 4250000
+JobCurrentStartDate = 1439965560
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1439965560
+LastMatchTime = 1439965560
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440564896"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+NumJobReconnects = 2
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SpooledOutputFiles = "jobReport.json.53"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.53"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 165965.0
+JobStatus = 4
+x509UserProxyEmail = "khakimjan.butanov@cern.ch"
+DAGManJobId = 1035690
+RemoteWallClockTime = 165965.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+LastRemoteHost = "glidein_9757_931570227@b635ef6906.cern.ch"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T1_DE_KIT,T2_UK_London_IC,T2_CH_CERN"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 61434
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "1"
+CRAB_localOutputFiles = "stepB_MC.root=stepB_MC_53.root"
+MaxHosts = 1
+CRAB_UserHN = "kbutanov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms5111"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 4095188
+MATCH_EXP_Used_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "grid_cms"
+MATCH_GLIDEIN_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "CERN"
+UserLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM#85bfee36-3b82-11e5-be34-001e67abf518"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 2
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1439964847
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.53"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/log/cmsRun_53.log.tar.gz, srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/stepB_MC_53.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "stepB_MC.root" }
+AutoClusterId = 16275
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439209593
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 2
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "CERN"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_KR_KNU"
+ClusterId = 1233705
+BytesSent = 119952.0
+CRAB_PublishName = "crab_25ns_WJetsToLNu_HT600_800-9da7f68dc2032d8626d7e7822bb10506"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/user/kbutanov.03af76ad04ddc195ee96e6a5469f1bbb1777390d/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_CH_CERN"
+MATCH_GLIDEIN_MaxMemMBs = 2800
+RequestMemory = 2000
+EnteredCurrentStatus = 1440131525
+MATCH_GLIDEIN_SiteWMS = "LSF"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/68/uscms5111/150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "689255460"
+CRAB_JobSW = "CMSSW_7_4_7"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 2800
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "Unknown"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440131525
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440564896
+NiceUser = false
+RootDir = "/"
+CommittedTime = 165965
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "LSF"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 33352
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SubmitEventNotes = "DAG Node: Job4"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 28513.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "2561111"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 8
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 3750000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.182.12"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5092137.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job4"
+LastPublicClaimId = "<129.93.182.12:42491>#1440048812#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 616.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1148372
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+LastJobLeaseRenewal = 1440115142
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440115142
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "OSGGOC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235992.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 2561111
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081789
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440616411"
+MATCH_EXP_JOB_GLIDEIN_Factory = "OSGGOC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=4 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9A89CA60-69FC-E411-9661-0025905C42B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/34F8B66A-D4FB-E411-8F89-842B2B29273C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/7CE6B848-F5FB-E411-A605-0025905A60A8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9E842AA8-54FC-E411-8BC7-000F53273500.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/C44AD465-D4FB-E411-8704-002590200A40.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/90B6CB1B-07FD-E411-BD52-001E67397CBA.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/183FB65F-69FC-E411-A5A8-0025904B7C26.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/5A0A9A0E-EDFB-E411-B95F-00266CF330B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/3E3768F1-61FC-E411-B163-002618943956.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/08DB9DDE-F4FB-E411-9BC9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/CE293F9B-54FC-E411-83E8-AC853D9DACD3.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/A4479F5F-69FC-E411-B0B5-0025904C6378.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/0419455F-D4FB-E411-AEFA-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E6BD6C76-54FC-E411-A1F2-AC853D9DACD7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/AC15F863-F5FB-E411-8F07-002590DB9286.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/CC9B7EE2-F4FB-E411-BCD9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/5844575F-D4FB-E411-81F5-003048FFD732.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/6EC5205E-D4FB-E411-9885-001E67396BB7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/B63200E8-69FC-E411-B949-0025904C51FC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/14554A42-54FC-E411-86D2-0025905A605E.root\"] --runAndLumis=job_lumis_4.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.4"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 33360.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 4
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440616411
+ImageSize = 1750000
+JobCurrentStartDate = 1440081782
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081782
+LastMatchTime = 1440081782
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440651211"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SpooledOutputFiles = "jobReport.json.4"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.4"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 33360.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 33360.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_1936_57194584@red-d8n12.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 3661158
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "8"
+CRAB_localOutputFiles = "results.root=results_4.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1727056
+MATCH_EXP_Used_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.4"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_4.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_4.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235992
+BytesSent = 597241.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440115142
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5092137.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440115142
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440651211
+NiceUser = false
+RootDir = "/"
+CommittedTime = 33360
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 31968
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SubmitEventNotes = "DAG Node: Job3"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 27257.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "3043383"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 14
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 4250000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.183.127"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5096573.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job3"
+LastPublicClaimId = "<129.93.183.127:56441>#1440063351#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 621.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1174388
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+LastJobLeaseRenewal = 1440113502
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440113503
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "SDSC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235991.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 3043383
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081533
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440630710"
+MATCH_EXP_JOB_GLIDEIN_Factory = "SDSC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=3 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/06DE7D5F-D4FB-E411-9C85-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E29E093E-54FC-E411-8AE5-0025905A60FE.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/F0FDF730-EDFB-E411-842B-00261834B51D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/FECCF363-F5FB-E411-85A3-002590DBDFE0.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/0E4CEBFE-ECFB-E411-9F0C-842B2B29273C.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/ECF66DCC-F0FB-E411-84CF-00259074AE32.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/96F29C69-D4FB-E411-9028-842B2B292627.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/6E887F0F-EDFB-E411-875B-BCAEC54B303A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/0C788712-F5FB-E411-AA0E-AC853D9DAC29.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/723A41AE-F4FB-E411-BAA3-0025905C431A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/DA4EA0F5-F4FB-E411-B2AD-00259073E31C.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/98C8F097-F7FB-E411-9A1F-52540006FB8D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/DE4F8235-5FFC-E411-80CD-0025905A6088.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/EA5D6151-F5FB-E411-99F0-0026B92E0C74.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/205D0CF9-F4FB-E411-934D-000F532734AC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/24BCAED9-F0FB-E411-A35B-00259074AE54.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/A4C160C1-F4FB-E411-A66D-B083FED76C6C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E86B536C-54FC-E411-8787-AC853D9DACE1.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/2E68E42D-EDFB-E411-8027-001E67397CC9.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/A056D12B-EDFB-E411-9E51-52540006FDD6.root\"] --runAndLumis=job_lumis_3.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.3"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 31976.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 3
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440630710
+ImageSize = 2000000
+JobCurrentStartDate = 1440081527
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081527
+LastMatchTime = 1440081527
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440665510"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SpooledOutputFiles = "jobReport.json.3"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.3"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 31976.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 31976.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_11321_920434792@red-d23n7.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 4111436
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "14"
+CRAB_localOutputFiles = "results.root=results_3.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1756756
+MATCH_EXP_Used_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.3"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_3.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_3.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235991
+BytesSent = 604821.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440113503
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5096573.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440113503
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440665510
+NiceUser = false
+RootDir = "/"
+CommittedTime = 31976
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+
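
A quick orientation on the fixture above: each job ad in tests.txt is a flat list of "Attribute = Expression" lines, and a blank line closes one ad and opens the next. The sketch below illustrates only that record layout. It is plain Java written for this note; the class and method names are invented, it assumes the raw file content (without the leading '+' diff markers), and it does not evaluate ClassAd expressions the way the parser exercised by these tests does -- values stay raw strings.

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    // Illustrative sketch only: split a dump like tests.txt into records of
    // "Attribute = Expression" pairs. Blank lines separate job ads.
    // Expressions are kept as uninterpreted strings; no ClassAd evaluation here.
    public class ClassAdDumpSketch {
        public static List<Map<String, String>> split(String dump) {
            List<Map<String, String>> records = new ArrayList<>();
            Map<String, String> current = new LinkedHashMap<>();
            for (String rawLine : dump.split("\\r?\\n")) {
                String line = rawLine.trim();
                if (line.isEmpty()) {                // blank line: record boundary
                    if (!current.isEmpty()) {
                        records.add(current);
                        current = new LinkedHashMap<>();
                    }
                    continue;
                }
                int eq = line.indexOf('=');          // first '=' splits name from expression
                if (eq > 0) {
                    current.put(line.substring(0, eq).trim(),
                                line.substring(eq + 1).trim());
                }
            }
            if (!current.isEmpty()) {                // flush the final record
                records.add(current);
            }
            return records;
        }
    }

Run over a record like the first one above, this yields one map per job ad in which, for example, the key ClusterId maps to the raw string "1217455" and Requirements maps to the unevaluated boolean expression text.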

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/resources/functional_tests.txt
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/functional_tests.txt b/asterix-external-data/src/test/resources/functional_tests.txt
new file mode 100644
index 0000000..42c0e95
--- /dev/null
+++ b/asterix-external-data/src/test/resources/functional_tests.txt
@@ -0,0 +1,362 @@
+/////////////////////////////////
+echo Testing basic math...
+eval x = (1 + 2)
+same $x, 3
+eval x = (3 - 1) 
+same $x, 2
+eval x = (2 * 3)
+same $x, 6
+eval x = (8 / 2)
+same $x, 4
+
+echo Testing extreme numbers...
+same string(real("INF")), "real(\"INF\")"
+same string(real("-INF")), "real(\"-INF\")"
+same string(real("NaN")), "real(\"NaN\")"
+diff real("NaN"), real("NaN")
+same real("INF"), real("INF")
+same real("-INF"), real("-INF")
+diff real("INF"), real("-INF")
+same 0.0, -(0.0)
+same 0.0, real("-0.0")
+same string(0.0), "0.0"
+same string(-0.0), "-0.0"
+
+/////////////////////////////////
+echo Testing basic attributes in a ClassAd...
+eval x = [
+            a = 1;
+            b = 2.0;
+            c = "alain";
+            d = true;
+            atime = absTime("2004-01-01");
+            rtime = relTime("2+25:14:16.123");
+            l = {1, 1, 2, 3, 5};
+            e = error;
+            u = undefined;
+         ]
+same $x.a, 1
+same $x.b, 2.0
+same $x.c, "alain"
+same $x.d, true
+same $x.atime, absTime("2004-01-01");
+same $x.rtime, relTime("2+25:14:16.123");
+same $x.l, {1, 1, 2, 3, 5}
+same $x.l[4], 5
+same $x.e, error
+same $x.u, undefined
+same isinteger($x.a), true
+same isinteger($x.b), false
+same isreal($x.b), true
+same isreal($x.c), false
+same isstring($x.c), true
+same isstring($x.d), false
+same isboolean($x.d), true
+same isboolean($x.c), false
+same isabstime($x.atime), true
+same isabstime($x.rtime), false
+same isreltime($x.rtime), true
+same isreltime($x.atime), false
+same islist($x.l), true
+same islist($x.a), false
+same iserror($x.e), true
+same iserror($x.u), false
+same isundefined($x.u), true
+same isundefined($x.e), false
+
+// Note that testing XML relies on the ClassAd from
+// the above testing.
+// echo Testing XML...
+// eval y = [ a = 2; b = "Lisp rocks"; ]
+// writexml tmp.xml {$x, $y}
+// readxml z tmp.xml
+// same $x, $z[0]
+// same $y, $z[1]
+
+/////////////////////////////////
+echo Testing select on lists...
+eval x = {
+           [a = 3; b = "I see London"],
+           [a = 2; b = "I see France"],
+           [a = 1; b = "I see Alain's funky pants"]
+         }
+same $x.a, {3, 2, 1}
+same $x.b, {"I see London", "I see France", "I see Alain's funky pants"}
+same $x.c, {undefined, undefined, undefined}
+same {}.a, {}
+
+/////////////////////////////////
+echo Testing subscripts
+eval x = [
+           a = 3;
+           b = "alain";
+           ab = 4;
+         ]
+same $x["a"], 3
+same $x["b"], "alain"
+same $x["c"], error
+eval d = $x["c"]
+same $x[strcat("a", "b")], 4
+eval x = {"a", "b", "c"}
+same $x[0], "a"
+same $x[1], "b"
+same $x[2], "c"
+same $x[3], error
+
+/////////////////////////////////
+echo Testing multiple semicolons...
+eval x = [
+           ;;
+           a = 3;;
+           b = 4;;
+         ]
+
+/////////////////////////////////
+echo Testing functions...
+same int(3), 3
+same int(3.9), 3
+same int("3.9"), 3
+same int(absTime("1970-01-01T:00:00:01Z")), 1
+same int(reltime("01:00:01")), 3601
+eval y = int(absTime("1970-01-01T:00:00:01Z"))
+same $y, 1
+
+same real(3), 3.0
+same real(3.9), 3.9
+same real("3.9"), 3.9
+same real(absTime("1970-01-01T:00:00:01Z")), 1.0
+same real(reltime("01:00:01")), 3601.0
+
+same string("alain"), "alain"
+same string(1), "1"
+
+same floor(3.9), 3
+same floor("3.9"), 3
+
+same ceiling(3.9), 4
+same ceiling("3.9"), 4
+
+same round(3.1), 3
+same round(3.9), 4
+
+same strcat("", "roy"), "roy"
+same strcat("alain", ""), "alain"
+same strcat("alain", "roy"), "alainroy"
+same strcat(14, " bottles of root beer"), "14 bottles of root beer"
+
+same substr("abcde", 1), "bcde"
+same substr("abcde", 4), "e"
+same substr("abcde", 5), ""
+same substr("abcde", 1, 2), "bc"
+same substr("abcde", 4, 2), "e"
+
+same strcmp("alain", "roy") < 0, true
+same strcmp("roy", "alain") > 0, true
+same strcmp("alain", "alain"), 0
+
+same stricmp("alain", "ALAIN"), 0
+same stricmp("alain", "roy") < 0, true
+
+same tolower("ALAIN"), "alain"
+same toupper("alain"), "ALAIN"
+same tolower(true), "true"
+same toupper(true), "TRUE"
+
+same member(1, {1, 2, 3}), true
+same member(4, {1, 2, 3}), false
+
+same regexp("Alain.*Roy", "Alain Aslag Roy"), true
+same regexp("alain.*roy", "Alain Aslag Roy"), false
+same regexp("alain.*roy", "Alain Aslag Roy", "i"), true
+
+//same regexpMember("b.*", {}), false
+//same regexpMember("b.*", {"aa"}), false
+//same regexpMember("b.*", {"aa", "bb"}), true
+//same regexpMember("b.*", {"bb", "aa"}), true
+//same regexpMember("b.*", {1, "bb"} ), error
+
+
+eval t = absTime("1970-01-02T:03:04:05Z")
+same splitTime($t).year, 1970
+same splitTime($t).month, 1
+same splitTime($t).day, 2
+same splitTime($t).hours, 3
+same splitTime($t).minutes, 4
+same splitTime($t).seconds, 5
+same splitTime($t).offset, 0
+
+
+
+eval t = absTime("1970-01-02T:03:04:05-06:00")
+eval tt = splitTime($t)
+same splitTime($t).year, 1970
+same splitTime($t).month, 1
+same splitTime($t).day, 2
+same splitTime($t).hours, 3
+same splitTime($t).minutes, 4
+same splitTime($t).seconds, 5
+same splitTime($t).offset, -21600
+
+
+
+eval t = relTime("1d2h3m4.5s")
+eval tt = splitTime($t)
+same splitTime($t).days, 1
+same splitTime($t).hours, 2
+same splitTime($t).minutes, 3
+same splittime($t).seconds, 4.5
+eval tt = splitTime($t)
+
+
+eval t = absTime("1997-08-30T16:04:05-0500")
+eval f = formatTime($t, "%m %d %Y")
+same $f, "08 30 1997"
+eval f = formatTime($t, "%H %M %S")
+same $f, "16 04 05"
+eval f = formatTime($t, "%A %a")
+same $f, "Saturday Sat"
+eval f = formatTime($t, "%B %b")
+same $f, "August Aug"
+eval f = formatTime(splitTime($t), "%H:%M:%S")
+same $f, "16:04:05"
+eval f = formatTime($t)
+same $f, "Sat Aug 30 16:04:05 1997"
+
+same size({}), 0
+same size({1}), 1
+same size({1, 2, 3, 4, 5}), 5
+same size([]), 0
+same size([a = 1;]), 1
+same size([a = 1; b = 2;]), 2
+same size(""), 0
+same size("a"), 1
+same size("ab"), 2
+same size(3), error
+same size(3.4), error
+
+eval list0 = {}
+eval list1 = {1}
+eval list5 = {1, 2, 3, 4, 5}
+
+same sum($list0), undefined
+same avg($list0), undefined
+same min($list0), undefined
+same max($list0), undefined
+// #### Do we really want these to be false and true?
+same anycompare("<", $list0, 3), false
+same allcompare("<", $list0, 3), true
+
+same sum($list1), 1
+same avg($list1), 1.0
+same min($list1), 1
+same max($list1), 1
+same anycompare("<", $list1, 3), true
+same allcompare("<", $list1, 3), true
+
+same sum($list5), 15
+same avg($list5), 3.0
+same min($list5), 1
+same max($list5), 5
+same anycompare("<", $list5, 3), true
+same allcompare("<", $list5, 3), false
+
+same ifThenElse(1+1==2, 3, 4), 3
+same ifThenElse(1+1==3,3,4), 4
+same ifThenElse(ERROR,3,4), ERROR
+same ifThenElse(UNDEFINED,3,4), UNDEFINED
+
+same interval(1), "1"
+same interval(60*2 + 1), "2:01"
+same interval(3600*3 + 60*2 + 1), "3:02:01"
+same interval(3600*24*4 + 3600*3 + 60*2 + 1), "4+03:02:01"
+
+//same regexps("[abc]*([def]*)[ghi]*","aaaabbbbcccccdddeeefffggghhhiii","\\1"), "dddeeefff"
+//same regexps("[abc]*([def]*)[ghi]*","abcdefghi","\\0"), "abcdefghi"
+//same regexps("[abc]*([def]*)[ghi]*","abcdefghi","\\2"), error
+//same regexps("[abc]*([def]*)[ghi]*","NO","\\0"), ""
+
+
+echo Testing eval
+same eval("1+1"), 2
+same eval(1+1), 2
+same eval("1+"), ERROR
+eval x = [ A = 1; B = 2; C = eval("A+B"); ]
+same $x.C, 3
+
+echo Testing boolean expressions
+echo Testing && operator
+same false, false && false
+same false, false && undefined
+same false, false && true
+same false, false && error
+
+same false, undefined && false
+same undefined, undefined && undefined
+same undefined, undefined && true
+same error, undefined && error
+
+same false, true && false
+same undefined, true && undefined
+same true, true && true
+same error, true && error
+
+same error, error && false
+same error, error && undefined
+same error, error && true
+same error, error && error
+
+
+echo Testing || operator
+same false, false || false
+same undefined, false || undefined
+same true, false || true
+same error, false || error
+
+same undefined, undefined || false
+same undefined, undefined || undefined
+same true, undefined || true
+same error, undefined || error
+
+same true, true || false
+same true, true || undefined
+same true, true || true
+same true, true || error
+
+same error, error || false
+same error, error || undefined
+same error, error || true
+same error, error || error
+
+
+echo Testing ! operator
+same true, !false
+same undefined, !undefined
+same false, !true
+same error, !error
+
+
+echo Testing ? operator
+same false, false ? true : false
+same true, false ? false : true
+same undefined, false ? true : undefined
+same true, false ? undefined : true
+
+same true, true ? true : false
+same false, true ? false : true
+same true, true ? true : undefined
+same undefined, true ? undefined : true
+
+echo Testing characters with negative ascii values
+// # the following used to fail to parse on some systems
+same "–", "–"
+
+echo Testing stringListsIntersect()
+same true, stringListsIntersect("one,two","two,three")
+same false, stringListsIntersect("one,two","three,four")
+same false, stringListsIntersect("one,two","three,four",";")
+same true, stringListsIntersect("one,two","one")
+same true, stringListsIntersect("one, two","two, three")
+same true, stringListsIntersect("one,two","two,three",",")
+same true, stringListsIntersect("one;two","two;three",";")
+same undefined, stringListsIntersect("one,two",undefined)
+same undefined, stringListsIntersect(undefined,"one,two" )
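
The "eval"/"same" pairs above bind a variable to a ClassAd expression and then assert on the evaluated result. As a hedged sketch (not part of this change), the first basic-math check maps onto the Java port roughly as follows, using only calls that appear in the unit tests later in this series (ClassAdParser, ClassAd, AMutableInt64); the attribute name "x" is illustrative and parseClassAd may throw IOException:

    // Sketch: "eval x = (1 + 2)" followed by "same $x, 3"
    ClassAdParser parser = new ClassAdParser();
    ClassAd ad = parser.parseClassAd("[ x = 1 + 2; ]");
    AMutableInt64 result = new AMutableInt64(0);
    // evaluateAttrInt evaluates the expression bound to x and stores the integer result
    boolean haveX = ad.evaluateAttrInt("x", result);
    assert haveX && result.getLongValue() == 3;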


[21/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.4.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.4.update.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.4.update.aql
new file mode 100644
index 0000000..e1e93ca
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.4.update.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed which uses an external parser to parse data from files
+ *                The files have duplicates and long records
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+
+use dataverse externallibtest;
+
+set wait-for-completion-feed "true";
+
+connect feed CondorFeed to dataset Condor;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.5.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.5.query.aql
new file mode 100644
index 0000000..a861633
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.5.query.aql
@@ -0,0 +1,29 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+ /*
+ * Description  : Create a feed which uses an external parser to parse data from files
+ *                The files have duplicates and long records
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+use dataverse externallibtest;
+
+for $x in dataset Condor
+order by $x.GlobalJobId
+return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.6.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.6.lib.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.6.lib.aql
new file mode 100644
index 0000000..86af80f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.6.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.7.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.7.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.7.ddl.aql
new file mode 100644
index 0000000..16ada23
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.7.ddl.aql
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed which uses an external parser to parse data from files
+ *                The files have duplicates and long records
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+drop dataverse externallibtest if exists;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
index bc95f5e..67009c5 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_06/feeds_06.3.sleep.aql
@@ -16,4 +16,4 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-2000
+3000

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
index 29d25f5..10fa5bb 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_07/feeds_07.1.ddl.aql
@@ -32,30 +32,27 @@ create dataverse feeds;
 use dataverse feeds;
 
 create type TwitterUserType as closed {
-	screen-name: string,
-	lang: string,
-	friends_count: int32,
-	statuses_count: int32,
-	name: string,
-	followers_count: int32
+ screen_name: string,
+ language: string,
+ friends_count: int32,
+ status_count: int32,
+ name: string,
+ followers_count: int32
 } 
 
 create type TweetMessageType as closed {
-	id: int64,
-    user: TwitterUserType,
-	latitude: double,
-	longitude: double,
-	send-time: datetime,
-	message_text: string,
-	created_at: string,
-	country: string
+ id: int64,
+ user: TwitterUserType,
+ latitude: double,
+ longitude: double,
+ message_text: string,
+ created_at: string,
+ country: string
 }
 
 create dataset SyntheticTweets(TweetMessageType)
 primary key id;
 
-create index locationIdx on SyntheticTweets(sender-location) type rtree;
-
 create feed  SyntheticTweetFeed
 using twitter_firehose
-(("duration"="5"),("tps"="50"),("type-name"="TweetMessageType"),("tput-duration"="5"),("dataverse-dataset"="feeds:SyntheticTweets"),("mode"="controlled"));
+(("duration"="5"),("tps"="50"),("type-name"="TweetMessageType"),("reader"="adm"),("format"="adm"),("reader-stream"="twitter_firehose"),("tput-duration"="5"),("dataverse-dataset"="feeds:SyntheticTweets"),("mode"="controlled"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
index b71b422..85b4747 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_08/feeds_08.1.ddl.aql
@@ -32,22 +32,22 @@ create dataverse feeds;
 use dataverse feeds;
 
 create type TwitterUserType as closed {
-	screen_name: string,
-	language: string,
-	friends_count: int32,
-	status_count: int32,
-	name: string,
-	followers_count: int32
+ screen_name: string,
+ language: string,
+ friends_count: int32,
+ status_count: int32,
+ name: string,
+ followers_count: int32
 } 
 
 create type TweetMessageType as closed {
-	id: int64,
-    user: TwitterUserType,
-    latitude: double,
-	longitude: double,
-	message_text: string,
-	created_at: string,
-	country: string
+ id: int64,
+ user: TwitterUserType,
+ latitude: double,
+ longitude: double,
+ message_text: string,
+ created_at: string,
+ country: string
 }
 
 create dataset SyntheticTweets(TweetMessageType)
@@ -57,4 +57,4 @@ create index ngram_index on SyntheticTweets(message_text) type ngram(3);
 
 create feed  SyntheticTweetFeed
 using twitter_firehose
-(("duration"="5"),("tps"="50"),("type-name"="TweetMessageType"),("tput-duration"="5"),("dataverse-dataset"="feeds:SyntheticTweets"),("mode"="controlled"));
+(("duration"="5"),("tps"="50"),("type-name"="TweetMessageType"),("tput-duration"="5"),("dataverse-dataset"="feeds:SyntheticTweets"),("reader"="adm"),("format"="adm"),("reader-stream"="twitter_firehose"),("mode"="controlled"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
index 54bbd3c..8664745 100644
--- a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.1.ddl.aql
@@ -32,22 +32,22 @@ create dataverse feeds_09;
 use dataverse feeds_09;
 
 create type TwitterUserType as closed {
-	screen_name: string,
-	language: string,
-	friends_count: int32,
-	status_count: int32,
-	name: string,
-	followers_count: int32
+ screen_name: string,
+ language: string,
+ friends_count: int32,
+ status_count: int32,
+ name: string,
+ followers_count: int32
 } 
 
 create type TweetMessageType as closed {
-    id: int64,
-    user: TwitterUserType,
-    latitude: double,
-    longitude: double,
-    message_text: string,
-    created_at: string,
-    country: string
+ id: int64,
+ user: TwitterUserType,
+ latitude: double,
+ longitude: double,
+ message_text: string,
+ created_at: string,
+ country: string
 }
 
 create dataset SyntheticTweets(TweetMessageType)
@@ -57,4 +57,4 @@ create index message_text on SyntheticTweets(message_text) type btree;
 
 create feed  SyntheticTweetFeed
 using twitter_firehose
-(("duration"="5"),("tps"="50"),("tput-duration"="5"),("type-name"="TweetMessageType"),("dataverse-dataset"="feeds:SyntheticTweets"),("mode"="controlled"));
+(("duration"="5"),("tps"="50"),("tput-duration"="5"),("type-name"="TweetMessageType"),("dataverse-dataset"="feeds:SyntheticTweets"),("reader"="adm"),("format"="adm"),("reader-stream"="twitter_firehose"),("mode"="controlled"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.4.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.4.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.4.ddl.aql
new file mode 100644
index 0000000..3cce0ff
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_09/feeds_09.4.ddl.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed using the synthetic feed simulator adapter.
+                  Create a dataset that has an associated ngram index.
+                  The synthetic feed simulator uses the Social-Data generator to generate data and simulate a feed.
+                  The feed lasts a configured duration with data arriving at a configured rate (tweets per second).
+                  Verify the existence of data after the feed finishes.
+ * Issue        : 711
+ * Expected Res : Success
+ * Date         : 8th Feb 2014
+ */
+
+drop dataverse feeds_09 if exists;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.4.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.4.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.4.ddl.aql
new file mode 100644
index 0000000..f520f84
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_10/feeds_10.4.ddl.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed using the synthetic feed simulator adapter.
+                  Create a dataset that has an associated ngram index.
+                  The synthetic feed simulator uses the Social-Data generator to generate data and simulate a feed.
+                  The feed lasts a configured duration with data arriving at a configured rate (tweets per second).
+                  Verify the existence of data after the feed finishes.
+ * Issue        : 711
+ * Expected Res : Success
+ * Date         : 8th Feb 2014
+ */
+
+drop dataverse feeds_10 if exists;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.4.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.4.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.4.ddl.aql
new file mode 100644
index 0000000..f6b089d
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feeds_12/feeds_12.4.ddl.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed using the synthetic feed simulator adapter.
+                  Create a dataset that has an associated ngram index.
+                  The synthetic feed simulator uses the Social-Data generator to generate data and simulate a feed.
+                  The feed lasts a configured duration with data arriving at a configured rate (tweets per second).
+                  Verify the existence of data after the feed finishes.
+ * Issue        : 711
+ * Expected Res : Success
+ * Date         : 8th Feb 2014
+ */
+
+drop dataverse feeds_12 if exists;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.4.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.4.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.4.ddl.aql
new file mode 100644
index 0000000..35cd8ec
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/issue_230_feeds/issue_230_feeds.4.ddl.aql
@@ -0,0 +1,30 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed using the synthetic feed simulator adapter.
+                  Create a dataset that has an associated ngram index.
+                  The synthetic feed simulator uses the Social-Data generator to generate data and simulate a feed.
+                  The feed lasts a configured duration with data arriving at a configured rate (tweets per second).
+                  Verify the existence of data after the feed finishes.
+ * Issue        : 711
+ * Expected Res : Success
+ * Date         : 8th Feb 2014
+ */
+
+drop dataverse feeds if exists;


[09/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
new file mode 100644
index 0000000..92c6ba1
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdToADMTest.java
@@ -0,0 +1,132 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.test;
+
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Map.Entry;
+
+import org.apache.asterix.external.api.IRawRecord;
+import org.apache.asterix.external.classad.CaseInsensitiveString;
+import org.apache.asterix.external.classad.CharArrayLexerSource;
+import org.apache.asterix.external.classad.ClassAd;
+import org.apache.asterix.external.classad.ExprTree;
+import org.apache.asterix.external.classad.Value;
+import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReader;
+import org.apache.asterix.external.input.stream.LocalFileSystemInputStream;
+import org.apache.asterix.external.library.ClassAdParser;
+import org.apache.asterix.external.util.ExternalDataConstants;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+public class ClassAdToADMTest extends TestCase {
+    /**
+     * Create the test case
+     *
+     * @param testName
+     *            name of the test case
+     */
+    public ClassAdToADMTest(String testName) {
+        super(testName);
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite() {
+        return new TestSuite(ClassAdToADMTest.class);
+    }
+
+    /**
+     * Reads the ClassAd records in jobads.txt and prints each one as an ADM-like object.
+     */
+    public void testApp() {
+        try {
+            // test here
+            ClassAd pAd = new ClassAd();
+            String[] files = new String[] { "/jobads.txt" };
+            ClassAdParser parser = new ClassAdParser();
+            CharArrayLexerSource lexerSource = new CharArrayLexerSource();
+            for (String path : files) {
+                SemiStructuredRecordReader recordReader = new SemiStructuredRecordReader();
+                HashMap<String, String> configuration = new HashMap<String, String>();
+                configuration.put(ExternalDataConstants.KEY_RECORD_START, "[");
+                configuration.put(ExternalDataConstants.KEY_RECORD_END, "]");
+                recordReader.configure(configuration);
+                LocalFileSystemInputStream in = new LocalFileSystemInputStream(
+                        Paths.get(getClass().getResource(path).toURI()), null, false);
+                in.configure(configuration);
+                recordReader.setInputStream(in);
+                Value val = new Value();
+                int i = 0;
+                while (recordReader.hasNext()) {
+                    i++;
+                    System.out.print("{ ");
+                    val.clear();
+                    IRawRecord<char[]> record = recordReader.next();
+                    lexerSource.setNewSource(record.get());
+                    parser.setLexerSource(lexerSource);
+                    parser.parseNext(pAd);
+                    //System.out.println(pAd);
+                    Map<CaseInsensitiveString, ExprTree> attrs = pAd.getAttrList();
+                    boolean notFirst = false;
+                    for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
+                        CaseInsensitiveString name = entry.getKey();
+                        ExprTree tree = entry.getValue();
+                        if (notFirst) {
+                            System.out.print(", ");
+                        }
+                        notFirst = true;
+                        switch (tree.getKind()) {
+                            case ATTRREF_NODE:
+                            case CLASSAD_NODE:
+                            case EXPR_ENVELOPE:
+                            case EXPR_LIST_NODE:
+                            case FN_CALL_NODE:
+                            case OP_NODE:
+                                if (pAd.evaluateAttr(name.get(), val)) {
+                                    System.out.print("\"" + name + "Expr\":" + "\"expr=" + tree + "\"");
+                                    System.out.print(", \"" + name + "\":" + val);
+                                } else {
+                                    System.out.print("\"" + name + "\":" + tree);
+                                }
+                                break;
+                            case LITERAL_NODE:
+                                // No need to do anything
+                                System.out.print("\"" + name + "\":" + tree);
+                                break;
+                            default:
+                                System.out.println("Something is wrong: unexpected expression kind " + tree.getKind());
+                                break;
+                        }
+                    }
+                    System.out.println(" }");
+                }
+                System.out.println(i + " number of records found");
+                recordReader.close();
+            }
+        } catch (Exception e) {
+            e.printStackTrace();
+            assertTrue(false);
+        }
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java
new file mode 100644
index 0000000..c74fd03
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTest.java
@@ -0,0 +1,61 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.test;
+
+import java.io.IOException;
+
+import junit.framework.Test;
+import junit.framework.TestCase;
+import junit.framework.TestSuite;
+
+/**
+ * Unit test driver for the ClassAd library port.
+ */
+public class ClassAdUnitTest extends TestCase {
+    /**
+     * Create the test case
+     *
+     * @param testName
+     *            name of the test case
+     */
+    public ClassAdUnitTest(String testName) {
+        super(testName);
+    }
+
+    /**
+     * @return the suite of tests being tested
+     */
+    public static Test suite() {
+        return new TestSuite(ClassAdUnitTest.class);
+    }
+
+    /**
+     * Rigorous Test :-)
+     */
+    public void testApp() {
+        String[] args = { "", "-d", "-vv", "-all" };
+        try {
+            ClassAdUnitTester.test(args.length, args);
+        } catch (IOException e) {
+            e.printStackTrace();
+            assertTrue(false);
+        }
+        assertTrue(true);
+    }
+}
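
For reference, the tester invoked above can also be driven with a narrower selector than "-all". A minimal sketch (not part of the committed tests), assuming only the flags handled by Parameters.ParseCommandLine in ClassAdUnitTester below; test(...) returns true when any check fails and may throw IOException:

    // Sketch: run only the non-ClassAd parsing checks, verbosely.
    // argv[0] is skipped by the tester, hence the leading empty element.
    String[] args = { "", "-v", "-parsing" };
    boolean haveErrors = ClassAdUnitTester.test(args.length, args);
    assert !haveErrors;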

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
new file mode 100644
index 0000000..561b541
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/test/ClassAdUnitTester.java
@@ -0,0 +1,796 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad.test;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.TreeSet;
+
+import org.apache.asterix.external.classad.AMutableCharArrayString;
+import org.apache.asterix.external.classad.ClassAd;
+import org.apache.asterix.external.classad.ClassAdTime;
+import org.apache.asterix.external.classad.ExprList;
+import org.apache.asterix.external.classad.ExprTree;
+import org.apache.asterix.external.classad.Literal;
+import org.apache.asterix.external.classad.Util;
+import org.apache.asterix.external.classad.Value;
+import org.apache.asterix.external.classad.Value.ValueType;
+import org.apache.asterix.external.library.ClassAdParser;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ClassAdUnitTester {
+    static class Parameters {
+        public boolean debug;
+        public boolean verbose;
+        public boolean veryVerbose;
+
+        public boolean checkAll;
+        public boolean checkParsing;
+        public boolean checkClassad;
+        public boolean checkExprlist;
+        public boolean checkValue;
+        public boolean checkLiteral;
+        public boolean checkMatch;
+        public boolean checkOperator;
+        public boolean checkCollection;
+        public boolean checkUtils;
+
+        /*********************************************************************
+         * Function: Parameters::ParseCommandLine
+         * Purpose: This parses the command line. Note that it will exit
+         * if there are any problems.
+         *********************************************************************/
+        public void ParseCommandLine(int argc, String[] argv) {
+
+            boolean selectedTest = false;
+            boolean help = false;
+
+            // First we set up the defaults.
+            debug = false;
+            verbose = false;
+            veryVerbose = false;
+            checkAll = false;
+            checkParsing = false;
+            checkClassad = false;
+            checkExprlist = false;
+            checkValue = false;
+            checkLiteral = false;
+            checkMatch = false;
+            checkOperator = false;
+            checkCollection = false;
+            checkUtils = false;
+
+            // Then we parse to see what the user wants.
+            for (int argIndex = 1; argIndex < argc; argIndex++) {
+                if (argv[argIndex].equalsIgnoreCase("-h") || argv[argIndex].equalsIgnoreCase("-help")) {
+                    help = true;
+                    break;
+                } else if (argv[argIndex].equalsIgnoreCase("-d") || argv[argIndex].equalsIgnoreCase("-debug")) {
+                    debug = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-v") || argv[argIndex].equalsIgnoreCase("-verbose")) {
+                    verbose = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-vv") || argv[argIndex].equalsIgnoreCase("-veryverbose")) {
+                    verbose = true;
+                    veryVerbose = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-all")) {
+                    checkAll = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-parsing")) {
+                    checkParsing = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-classad")) {
+                    checkClassad = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-epxrlist")) {
+                    checkExprlist = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-value")) {
+                    checkValue = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-literal")) {
+                    checkLiteral = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-match")) {
+                    checkMatch = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-operator")) {
+                    checkOperator = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-collection")) {
+                    checkCollection = true;
+                    selectedTest = true;
+                } else if (argv[argIndex].equalsIgnoreCase("-utils")) {
+                    checkUtils = true;
+                    selectedTest = true;
+                } else {
+                    System.out.println("Unknown argument: " + argv[argIndex]);
+                    help = true;
+                }
+            }
+
+            if (help) {
+                System.out.println("Usage: classad_unit_tester [options]");
+                System.out.println();
+                System.out.println("Basic options:");
+                System.out.println("    -h  | -help:        print help");
+                System.out.println("    -d  | -debug:       debug");
+                System.out.println("    -v  | -verbose:     verbose output");
+                System.out.println("    -vv | -veryverbose: very verbose output");
+                System.out.println();
+                System.out.println("Test selectors:");
+                System.out.println("    -all:        all tests listed below (the default)");
+                System.out.println("    -parsing:    test non-ClassAd parsing.");
+                System.out.println("    -classad:    test the ClassAd class.");
+                System.out.println("    -exprlist:   test the ExprList class.");
+                System.out.println("    -value:      test the Value class.");
+                System.out.println("    -literal:    test the Literal class.");
+                System.out.println("    -match:      test the MatchClassAd class.");
+                System.out.println("    -operator:   test the Operator class.");
+                System.out.println("    -collection: test the Collection class.");
+                System.out.println("    -utils:      test little utilities.");
+                System.exit(1);
+            }
+            if (!selectedTest) {
+                checkAll = true;
+            }
+
+            return;
+        }
+    }
+
+    static class Results {
+        public int numberOfErrors;
+        public int numberOfTests;
+        public boolean verbose;
+        public boolean very_verbose;
+
+        public Results(Parameters parameters) {
+            numberOfErrors = 0;
+            numberOfTests = 0;
+            verbose = parameters.verbose;
+            very_verbose = parameters.veryVerbose;
+        }
+
+        public void AddSuccessfulTest(String name, String testLine) {
+            numberOfTests++;
+            if (very_verbose) {
+                System.out.println("SUCCESS: " + name + " on test line " + testLine);
+            }
+        }
+
+        public void AddFailedTest(String name, String testLine) {
+            numberOfErrors++;
+            numberOfTests++;
+
+            System.out.println("FAILURE " + name + " on test line " + testLine);
+            assert (false);
+        }
+
+        public void GetResults(AMutableInt32 numberOfErrors, AMutableInt32 numberOfTests) {
+            numberOfErrors.setValue(this.numberOfErrors);
+            numberOfTests.setValue(this.numberOfTests);
+        }
+    }
+
+    /*********************************************************************
+     * Function: main
+     * Purpose: The main control loop.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static boolean test(int argc, String[] argv) throws IOException {
+        AMutableInt32 numberOfErrors = new AMutableInt32(0);
+        AMutableInt32 numberOfTests = new AMutableInt32(0);
+        boolean have_errors;
+        Parameters parameters = new Parameters();
+
+        /* ----- Setup ----- */
+        printVersion();
+        parameters.ParseCommandLine(argc, argv);
+        Results results = new Results(parameters);
+
+        /* ----- Run tests ----- */
+        if (parameters.checkAll || parameters.checkParsing) {
+            testParsing(parameters, results);
+        }
+
+        if (parameters.checkAll || parameters.checkClassad) {
+            testClassad(parameters, results);
+        }
+        if (parameters.checkAll || parameters.checkExprlist) {
+            testExprList(parameters, results);
+        }
+        if (parameters.checkAll || parameters.checkValue) {
+            testValue(parameters, results);
+        }
+        if (parameters.checkAll || parameters.checkLiteral) {
+        }
+        if (parameters.checkAll || parameters.checkMatch) {
+        }
+        if (parameters.checkAll || parameters.checkOperator) {
+        }
+        if (parameters.checkAll || parameters.checkCollection) {
+            //test_collection(parameters, results);
+        }
+        if (parameters.checkAll || parameters.checkUtils) {
+            testUtils(parameters, results);
+        }
+
+        /* ----- Report ----- */
+        System.out.println();
+        results.GetResults(numberOfErrors, numberOfTests);
+        if (numberOfErrors.getIntegerValue() > 0) {
+            have_errors = true;
+            System.out.println("Finished with errors: ");
+            System.out.println("    " + numberOfErrors + " errors");
+            System.out.println("    " + numberOfTests + " tests");
+        } else {
+            have_errors = false;
+            System.out.println("Finished with no errors.");
+            System.out.println("    " + numberOfTests + " tests");
+        }
+        return have_errors;
+    }
+
+    public static void test(String name, boolean test, String testLine, Results results) {
+        if (test)
+            results.AddSuccessfulTest(name, testLine);
+        else
+            results.AddFailedTest(name, testLine);
+    }
+
+    public static void test(String name, boolean test, Results results) {
+        test(name, test, name, results);
+    }
+
+    /*********************************************************************
+     * Function: test_parsing
+     * Purpose: Test parsing that isn't ClassAd-specific. (ClassAd-specific
+     * is in testClassad.)
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void testParsing(Parameters parameters, Results results) throws IOException {
+        ClassAdParser parser = new ClassAdParser();
+        ExprTree tree;
+
+        // My goal is to ensure that these expressions don't crash
+        // They should also return a null tree
+        tree = parser.ParseExpression("true || false || ;");
+        test("Bad or doesn't crash & isn't bogus", tree == null, "true || false || ;", results);
+
+        tree = parser.ParseExpression("true && false && ;");
+        test("Bad and doesn't crash & isn't bogus", tree == null, "true && false && ;", results);
+
+        tree = parser.ParseExpression("3 | 4 | ;");
+        test("Bad and doesn't crash & isn't bogus", tree == null, "3 | 4 | ;", results);
+
+        tree = parser.ParseExpression("3 ^ 4 ^ ;");
+        test("Bad exclusive or doesn't crash & isn't bogus", tree == null, "3 ^ 4 ^ ;", results);
+
+        tree = parser.ParseExpression("3 & 4 & ;");
+        test("Bad bitwise and doesn't crash & isn't bogus", tree == null, "3 & 4 & ;", results);
+
+        tree = parser.ParseExpression("3 == 4 ==  ;");
+        test("Bad equality doesn't crash & isn't bogus", tree == null, "3 == 4 ==  ;", results);
+
+        tree = parser.ParseExpression("1 < 3 < ;");
+        test("Bad relational doesn't crash & isn't bogus", tree == null, "1 < 3 < ;", results);
+
+        tree = parser.ParseExpression("1 + 3 + ;");
+        test("Bad shift doesn't crash & isn't bogus", tree == null, "1 + 3 + ;", results);
+
+        tree = parser.ParseExpression("1 + 3 + ;");
+        test("Bad additive doesn't crash & isn't bogus", tree == null, "1 + 3 + ;", results);
+
+        tree = parser.ParseExpression("1 * 3 * ;");
+        test("Bad multiplicative doesn't crash & isn't bogus", tree == null, "1 * 3 * ;", results);
+    }
+
+    /*********************************************************************
+     * Function: test_classad
+     * Purpose: Test the ClassAd class.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void testClassad(Parameters parameters, Results results) throws IOException {
+        ClassAdParser parser = new ClassAdParser();
+        boolean haveAttribute;
+        boolean success;
+
+        System.out.println("Testing the ClassAd class...");
+
+        String input_basic = "[ A = 3; B = 4.0; C = \"babyzilla\"; D = true; E = {1}; F = [ AA = 3; ]; G =\"deleteme\";]";
+        ClassAd basic = new ClassAd();
+        AMutableInt64 i = new AMutableInt64(0);
+        MutableBoolean b = new MutableBoolean();
+        AMutableDouble r = new AMutableDouble(0);
+        AMutableCharArrayString s = new AMutableCharArrayString();
+        ClassAd c = new ClassAd();
+        //ExprList *l;
+
+        basic = parser.parseClassAd(input_basic);
+
+        /* ----- Test EvaluateAttr* ----- */
+        haveAttribute = basic.evaluateAttrInt("A", i);
+        test("Have attribute A", (haveAttribute == true), "test_classad 1", results);
+        test("A is 3", (i.getLongValue() == 3), "test_classad 2", results);
+
+        haveAttribute = basic.evaluateAttrReal("B", r);
+        test("Have attribute B", (haveAttribute == true), "test_classad 3", results);
+        test("B is 4.0", (r.getDoubleValue() == 4.0), "test_classad 4", results);
+
+        haveAttribute = basic.evaluateAttrString("C", s);
+        test("Have attribute C", (haveAttribute == true), "test_classad 5", results);
+        test("C is 'babyzilla'", (s.compareTo("babyzilla") == 0), "test_classad 6", results);
+
+        haveAttribute = basic.evaluateAttrBool("D", b);
+        test("Have attribute D", (haveAttribute == true), "test_classad 7", results);
+        test("D is true", (b.booleanValue() == true), "test_classad 8", results);
+
+        /* ----- Test basic insert and delete ----- */
+        success = basic.insertAttr("new", 4);
+        test("InsertAttr claims to have worked", (success == true), "test_classad 9", results);
+        haveAttribute = basic.evaluateAttrInt("new", i);
+        test("Have new attribute", (haveAttribute == true), "test_classad 10", results);
+        test("new attribute is 4", i.getLongValue() == 4, "test_classad 11", results);
+
+        success = basic.delete("new");
+        test("Delete claims to have worked", (success == true), "test_classad 12", results);
+        haveAttribute = basic.evaluateAttrInt("new", i);
+        test("New attribute was deleted", (haveAttribute == false), "test_classad 13", results);
+
+        success = basic.delete("G");
+        test("DELETE claims to have worked", (success == true), "test_classad 14", results);
+        haveAttribute = basic.evaluateAttrString("G", s);
+        test("Attribute G was deleted", (haveAttribute == false), "test_classad 15", results);
+
+        basic = null;
+
+        /* ----- Test GetExternalReferences ----- */
+        String inputRef = "[ Rank=Member(\"LCG-2_1_0\",other.Environment) ? other.Time/seconds : other.Time/minutes; minutes=60; ]";
+        TreeSet<String> refs = new TreeSet<String>();
+        ExprTree rank;
+
+        c = parser.parseClassAd(inputRef);
+        test("Made classad_ref", (c != null), "Test GetExternalReferences 1", results);
+        if (c != null) {
+            rank = c.lookup("Rank");
+            test("Rank exists", (rank != null), "Test GetExternalReferences 2", results);
+
+            if (rank != null) {
+                boolean haveReferences;
+                if ((haveReferences = c.getExternalReferences(rank, refs, true))) {
+                    test("have_references", (haveReferences == true), "Test GetExternalReferences 3", results);
+
+                    if (haveReferences) {
+                        boolean haveEnvironment;
+                        boolean haveTime;
+                        boolean haveSeconds;
+                        boolean haveOther;
+                        haveEnvironment = false;
+                        haveTime = false;
+                        haveSeconds = false;
+                        haveOther = false;
+                        for (String entry : refs) {
+                            System.out.println(entry);
+                            if (entry.compareTo("other.Environment") == 0) {
+                                haveEnvironment = true;
+                            } else if (entry.compareTo("other.Time") == 0) {
+                                haveTime = true;
+                            } else if (entry.compareTo("seconds") == 0) {
+                                haveSeconds = true;
+                            } else {
+                                haveOther = true;
+                            }
+                        }
+                        test("Have external reference to Environment", (haveEnvironment == true),
+                                "Test GetExternalReferences 4", results);
+                        test("Have external reference to Time", (haveTime == true), "Test GetExternalReferences 5",
+                                results);
+                        test("Have external reference to seconds", (haveSeconds == true),
+                                "Test GetExternalReferences 6", results);
+                        test("Have no other external references", (haveOther != true), "Test GetExternalReferences 7",
+                                results);
+                    }
+                }
+            }
+            c = null;
+        }
+
+        // This ClassAd may cause problems. Perhaps a memory leak.
+        // This test is only useful when run under valgrind.
+        String memoryProblemClassad = "[ Updates = [status = \"request_completed\"; timestamp = absTime(\"2004-12-16T18:10:59-0600]\")] ]";
+        c = parser.parseClassAd(memoryProblemClassad);
+
+        /* ----- Test Parsing multiple ClassAds ----- */
+        String twoClassads = "[ a = 3; ][ b = 4; ]";
+        ClassAd classad1 = new ClassAd();
+        ClassAd classad2 = new ClassAd();
+        AMutableInt32 offset = new AMutableInt32(0);
+
+        parser.parseClassAd(twoClassads, classad1, offset);
+        test("Have good offset #1", offset.getIntegerValue().intValue() == 10, "Test Parsing multiple ClassAds 1",
+                results);
+        parser.parseClassAd(twoClassads, classad2, offset);
+        test("Have good offset #2", offset.getIntegerValue().intValue() == 20, "Test Parsing multiple ClassAds 2",
+                results);
+
+        /* ----- Test chained ClassAds ----- */
+        //classad1 and classad2 from above test are used.
+        ClassAd classad3 = new ClassAd();
+
+        classad1.chainToAd(classad2);
+        test("classad1's parent is classad2", classad1.getChainedParentAd().equals(classad2), "Test chained ClassAds 1",
+                results);
+        haveAttribute = classad1.evaluateAttrInt("b", i);
+        test("chain has attribute b from parent", (haveAttribute == true), "Test chained ClassAds 2", results);
+        test("chain attribute b from parent is 4", (i.getLongValue() == 4), "Test chained ClassAds 3", results);
+
+        haveAttribute = classad1.evaluateAttrInt("a", i);
+        test("chain has attribute a from self", (haveAttribute == true), "Test chained ClassAds 4", results);
+        test("chain attribute a is 3", (i.getLongValue() == 3), "Test chained ClassAds 5", results);
+
+        // Now we modify classad2 (parent) to contain "a".
+        success = classad2.insertAttr("a", 7);
+        test("insert a into parent", (success == true), "Test chained ClassAds 6", results);
+        haveAttribute = classad1.evaluateAttrInt("a", i);
+        test("chain has attribute a from self (overriding parent)", (haveAttribute == true), "Test chained ClassAds 7",
+                results);
+        test("chain attribute a is 3 (overriding parent)", (i.getLongValue() == 3), "Test chained ClassAds 8", results);
+        haveAttribute = classad2.evaluateAttrInt("a", i);
+        test("chain parent has attribute a", (haveAttribute == true), "Test chained ClassAds 9", results);
+        test("chain parent attribute a is 7", (i.getLongValue() == 7), "Test chained ClassAds 10", results);
+
+        success = classad3.copyFromChain(classad1);
+        test("copy from chain succeeded", (success == true), "Test chained ClassAds 11", results);
+        haveAttribute = classad3.evaluateAttrInt("b", i);
+        test("copy of chain has attribute b", (haveAttribute == true), "Test chained ClassAds 12", results);
+        test("copy of chain has attribute b==4", (i.getLongValue() == 4), "Test chained ClassAds 13", results);
+
+        success = classad3.insertAttr("c", 6);
+        test("insert into copy of chain succeeded", (success == true), "Test chained ClassAds 14", results);
+        classad3.copyFromChain(classad1);
+        haveAttribute = classad3.evaluateAttrInt("c", i);
+        test("copy of chain is clean", (haveAttribute == false), "Test chained ClassAds 15", results);
+        classad3.insertAttr("c", 6);
+        success = classad3.updateFromChain(classad1);
+        test("update from chain succeeded", (success == true), "Test chained ClassAds 16", results);
+        haveAttribute = classad3.evaluateAttrInt("c", i);
+        test("update from chain is merged", (haveAttribute == true), "Test chained ClassAds 17", results);
+        test("update from chain has attribute c==6", (i.getLongValue() == 6), "Test chained ClassAds 18", results);
+    }
+
+    /*********************************************************************
+     * Function: test_exprlist
+     * Purpose: Test the ExprList class.
+     *
+     * @throws IOException
+     *********************************************************************/
+    public static void testExprList(Parameters parameters, Results results) throws IOException {
+        System.out.println("Testing the ExprList class...");
+
+        Literal literal10;
+        Literal literal20;
+        Literal literal21;
+
+        List<ExprTree> vector1 = new ArrayList<ExprTree>();
+        List<ExprTree> vector2 = new ArrayList<ExprTree>();
+
+        ExprList list0;
+        ExprList list0Copy;
+        ExprList list1;
+        ExprList list1Copy;
+        ExprList list2;
+        ExprList list2Copy;
+
+        /* ----- Setup Literals, the vectors, then ExprLists ----- */
+        literal10 = Literal.createReal("1.0");
+        literal20 = Literal.createReal("2.0");
+        literal21 = Literal.createReal("2.1");
+
+        vector1.add(literal10);
+        vector2.add(literal20);
+        vector2.add(literal21);
+
+        list0 = new ExprList();
+        list1 = new ExprList(vector1);
+        list2 = new ExprList(vector2);
+
+        /* ----- Did the lists get made? ----- */
+        test("Made list 0", (list0 != null), "Did the lists get made? 0", results);
+        test("Made list 1", (list1 != null), "Did the lists get made? 1", results);
+        test("Made list 2", (list2 != null), "Did the lists get made? 2", results);
+
+        /* ----- Are these lists identical to themselves? ----- */
+        test("ExprList identical 0", list0.sameAs(list0), "Are these lists identical to themselves? 0", results);
+        test("ExprList identical 1", list1.sameAs(list1), "Are these lists identical to themselves? 1", results);
+        test("ExprList identical 2", list2.sameAs(list2), "Are these lists identical to themselves? 2", results);
+
+        /* ----- Are they different from each other? ----- */
+        test("ExprLists different 0-1", !(list0.sameAs(list1)), "Are these lists different from each other? 0",
+                results);
+        test("ExprLists different 1-2", !(list1.sameAs(list2)), "Are these lists identical from each other? 1",
+                results);
+        test("ExprLists different 0-2", !(list0.sameAs(list2)), "Are these lists identical from each other? 2",
+                results);
+
+        /* ----- Check the size of the ExprLists to make sure they are ok ----- */
+        test("ExprList size 0", (list0.size() == 0), "check list size? 0", results);
+        test("ExprList size 1", (list1.size() == 1), "check list size? 1", results);
+        test("ExprList size 2", (list2.size() == 2), "check list size? 2", results);
+
+        /* ----- Make copies of the ExprLists ----- */
+        list0Copy = (ExprList) list0.copy();
+        list1Copy = (ExprList) list1.copy();
+        list2Copy = (ExprList) list2.copy();
+
+        /* ----- Did the copies get made? ----- */
+        test("Made copy of list 0", (list0Copy != null), "Did the copies get made? 0", results);
+        test("Made copy of list 1", (list1Copy != null), "Did the copies get made? 1", results);
+        test("Made copy of list 2", (list2Copy != null), "Did the copies get made? 2", results);
+
+        /* ----- Are they identical to the originals? ----- */
+        test("ExprList self-identity 0", (list0.sameAs(list0Copy)), "Are they identical to the originals? 0", results);
+        test("ExprList self-identity 1", (list1.sameAs(list1Copy)), "Are they identical to the originals? 1", results);
+        test("ExprList self-identity 2", (list2.sameAs(list2Copy)), "Are they identical to the originals? 2", results);
+
+        /* ----- Test adding and deleting from a list ----- */
+        Literal add;
+        add = Literal.createReal("2.2");
+
+        if (list2Copy != null) {
+            list2Copy.insert(add);
+            test("Edited list is different", !(list2.sameAs(list2Copy)), "Test adding and deleting from a list 0",
+                    results);
+            list2Copy.erase(list2Copy.size() - 1);
+            test("Twice Edited list is same", (list2.sameAs(list2Copy)), "Test adding and deleting from a list 1",
+                    results);
+        }
+
+        // Note that we do not delete the Literals that we created, because
+        // they should have been deleted when the list was deleted.
+
+        /* ----- Test an ExprList bug that Nate Mueller found ----- */
+        ClassAd classad;
+        ClassAdParser parser = new ClassAdParser();
+        MutableBoolean b = new MutableBoolean();
+        boolean haveAttribute;
+        boolean canEvaluate;
+        Value value = new Value();
+
+        String listClassadText = "[foo = 3; have_foo = member(foo, {1, 2, 3});]";
+        classad = parser.parseClassAd(listClassadText);
+        haveAttribute = classad.evaluateAttrBool("have_foo", b);
+        test("Can evaluate list in member function", (haveAttribute == true && b.booleanValue() == true),
+                "Test an ExprList bug that Nate Mueller found 0", results);
+
+        canEvaluate = classad.evaluateExpr("member(foo, {1, 2, blah, 3})", value);
+        test("Can evaluate list in member() outside of ClassAd", canEvaluate == true,
+                "Test an ExprList bug that Nate Mueller found 1", results);
+        return;
+    }
+
+    /*********************************************************************
+     * Function: test_value
+     * Purpose: Test the Value class.
+     *
+     * @throws HyracksDataException
+     *********************************************************************/
+    public static void testValue(Parameters parameters, Results results) throws HyracksDataException {
+        Value v = new Value();
+        boolean isExpectedType;
+        System.out.println("Testing the Value class...");
+        test("New value is undefined", (v.isUndefinedValue()), "test_value 1", results);
+        test("New value isn't boolean", !(v.isBooleanValue()), "test_value 2", results);
+        test("GetType gives UNDEFINED_VALUE", (v.getType() == ValueType.UNDEFINED_VALUE), "test_value 3", results);
+
+        v.setErrorValue();
+        test("Is error value", (v.isErrorValue()), "test_value 4", results);
+        test("GetType gives ERROR_VALUE", (v.getType() == ValueType.ERROR_VALUE), "test_value 5", results);
+
+        MutableBoolean b = new MutableBoolean();
+        v.setBooleanValue(true);
+        isExpectedType = v.isBooleanValue(b);
+        test("Value is not undefined", !(v.isUndefinedValue()), "Value is not undefined", results);
+        test("Value is boolean", (v.isBooleanValue()), "Value is boolean", results);
+        test("Try 2: New value is boolean", (isExpectedType == true), "Try 2: New value is boolean", results);
+        test("Boolean is true", (b.booleanValue() == true), "Boolean is true", results);
+        test("GetType gives BOOLEAN_VALUE", (v.getType() == ValueType.BOOLEAN_VALUE), "GetType gives BOOLEAN_VALUE",
+                results);
+
+        AMutableDouble r = new AMutableDouble(0.0);
+        v.setRealValue(1.0);
+        isExpectedType = v.isRealValue(r);
+        test("Value is real", isExpectedType, results);
+        test("Real is 1.0", (r.getDoubleValue() == 1.0), results);
+        test("GetType gives REAL_VALUE", (v.getType() == ValueType.REAL_VALUE), results);
+        test("Real is a number", v.isNumber(), results);
+
+        AMutableInt64 i = new AMutableInt64(0);
+        v.setIntegerValue(1);
+        isExpectedType = v.isIntegerValue(i);
+        test("Value is integer", isExpectedType, results);
+        test("Integer is 1", (i.getLongValue() == 1), results);
+        test("GetType gives INTEGER_VALUE", (v.getType() == ValueType.INTEGER_VALUE), results);
+        test("Integer is a number", v.isNumber(), results);
+
+        AMutableCharArrayString s = new AMutableCharArrayString();
+        v.setStringValue("Robert-Houdin");
+        isExpectedType = v.isStringValue(s);
+        test("Value is String", isExpectedType, results);
+        test("String is 'Robert-Houdin'", (0 == s.compareTo("Robert-Houdin")), results);
+        test("GetType gives STRING_VALUE", (v.getType() == ValueType.STRING_VALUE), results);
+
+        ClassAdTime at = new ClassAdTime(10, 36000000);
+        v.setAbsoluteTimeValue(at);
+        at.setValue(0);
+        at.setTimeZone(0);
+        isExpectedType = v.isAbsoluteTimeValue(at);
+        test("Value is absolute time", isExpectedType, results);
+        test("Absolute time is 10, 0", (10 == at.getTime() && 36000000 == at.getOffset()), results);
+        test("GetType gives ABSOLUTE_TIME_VALUE", (v.getType() == ValueType.ABSOLUTE_TIME_VALUE), results);
+
+        ClassAdTime rt = new ClassAdTime(10, false);
+        v.setRelativeTimeValue(10);
+        isExpectedType = v.isRelativeTimeValue(rt);
+        test("Value is relative time", isExpectedType, results);
+        test("Relative time is 10", (10 == rt.getRelativeTime()), results);
+        test("GetType gives RELATIVE_TIME_VALUE", (v.getType() == ValueType.RELATIVE_TIME_VALUE), results);
+
+        ExprList l = new ExprList();
+        ExprList ll = new ExprList();
+        v.setListValue(l);
+        isExpectedType = v.isListValue(ll);
+        test("Value is list value", isExpectedType, results);
+        test("List value is correct", l.equals(ll), results);
+        test("GetType gives LIST_VALUE", (v.getType() == ValueType.LIST_VALUE), results);
+
+        ExprList sl = new ExprList(true);
+        ll = new ExprList(true);
+        v.setListValue(sl);
+        isExpectedType = v.isListValue(ll);
+        test("Value is list value", isExpectedType, results);
+        test("List value is correct", sl.equals(ll), results);
+        test("GetType gives SLIST_VALUE", (v.getType() == ValueType.SLIST_VALUE), results);
+
+        ClassAd c = new ClassAd();
+        c.insertAttr("test_int", 10);
+        ClassAd cc = new ClassAd();
+        v.setClassAdValue(c);
+        isExpectedType = v.isClassAdValue(cc);
+        test("Value is ClassAd value", isExpectedType, results);
+        test("ClassAd value is correct", c.equals(cc), results);
+        test("GetType gives CLASSAD_VALUE", (v.getType() == ValueType.CLASSAD_VALUE), results);
+        return;
+    }
+
+    /*********************************************************************
+     * This test suite was deleted since I don't think we need it
+     * Function: test_collection
+     * Purpose: Test the ClassAdCollection class. Note that we test the
+     * local Collections only: we don't test the server/client
+     * versions available in ClassAdCollectionServer and
+     * ClassAdCollectionClient.
+     *********************************************************************/
+
+    /*
+     * This one is deleted too since I don't think we need it
+    public static boolean check_in_view(ClassAdCollection collection, String view_name, String classad_name) {
+        boolean have_view;
+        boolean in_view;
+
+        in_view = false;
+
+        LocalCollectionQuery query;
+
+        query.Bind(collection);
+
+        have_view = query.Query(view_name, null);
+        if (have_view) {
+            String classad_key;
+            for (query.ToFirst(), query.Current(classad_key); !query.IsAfterLast(); query.Next(classad_key)) {
+                if (!classad_key.compare(classad_name)) {
+                    in_view = true;
+                    break;
+                }
+            }
+        }
+        return in_view;
+    }
+    */
+
+    /*********************************************************************
+     * Function: test_utils
+     * Purpose: Test utils
+     *********************************************************************/
+    public static void testUtils(Parameters parameters, Results results) {
+        System.out.println("Testing little utilities...");
+
+        test("1800 is not a leap year", !Util.isLeapYear(1800), results);
+        test("1900 is not a leap year", !Util.isLeapYear(1900), results);
+        test("2000 is a leap year", Util.isLeapYear(2000), results);
+        test("2001 is not a leap year", !Util.isLeapYear(2001), results);
+        test("2002 is not a leap year", !Util.isLeapYear(2002), results);
+        test("2003 is not a leap year", !Util.isLeapYear(2003), results);
+        test("2004 is a leap year", Util.isLeapYear(2004), results);
+
+        test("70, 9, 24 . 25469", Util.fixedFromGregorian(70, 9, 24) == 25469, results);
+        test("135, 10, 2 . 49217", Util.fixedFromGregorian(135, 10, 2) == 49217, results);
+        test("470, 1, 8 . 171307", Util.fixedFromGregorian(470, 1, 8) == 171307, results);
+        test("576, 5, 20 . 210155", Util.fixedFromGregorian(576, 5, 20) == 210155, results);
+        test("694,  11, 10 . 253427", Util.fixedFromGregorian(694, 11, 10) == 253427, results);
+        test("1013,  4, 25 . 369740", Util.fixedFromGregorian(1013, 4, 25) == 369740, results);
+        test("1096,  5, 24 . 400085", Util.fixedFromGregorian(1096, 5, 24) == 400085, results);
+        test("1190,  3, 23 . 434355", Util.fixedFromGregorian(1190, 3, 23) == 434355, results);
+        test("1240,  3, 10 . 452605", Util.fixedFromGregorian(1240, 3, 10) == 452605, results);
+        test("1288,  4, 2 . 470160", Util.fixedFromGregorian(1288, 4, 2) == 470160, results);
+        test("1298,  4, 27 . 473837", Util.fixedFromGregorian(1298, 4, 27) == 473837, results);
+        test("1391,  6, 12 . 507850", Util.fixedFromGregorian(1391, 6, 12) == 507850, results);
+        test("1436,  2, 3 . 524156", Util.fixedFromGregorian(1436, 2, 3) == 524156, results);
+        test("1492,  4, 9 . 544676", Util.fixedFromGregorian(1492, 4, 9) == 544676, results);
+        test("1553,  9, 19 . 567118", Util.fixedFromGregorian(1553, 9, 19) == 567118, results);
+        test("1560,  3, 5 . 569477", Util.fixedFromGregorian(1560, 3, 5) == 569477, results);
+        test("1648,  6, 10 . 601716", Util.fixedFromGregorian(1648, 6, 10) == 601716, results);
+        test("1680,  6, 30 . 613424", Util.fixedFromGregorian(1680, 6, 30) == 613424, results);
+        test("1716,  7, 24 . 626596", Util.fixedFromGregorian(1716, 7, 24) == 626596, results);
+        test("1768,  6, 19 . 645554", Util.fixedFromGregorian(1768, 6, 19) == 645554, results);
+        test("1819,  8, 2 . 664224", Util.fixedFromGregorian(1819, 8, 2) == 664224, results);
+        test("1839,  3, 27 . 671401", Util.fixedFromGregorian(1839, 3, 27) == 671401, results);
+        test("1903,  4, 19 . 694799", Util.fixedFromGregorian(1903, 4, 19) == 694799, results);
+        test("1929,  8, 25 . 704424", Util.fixedFromGregorian(1929, 8, 25) == 704424, results);
+        test("1941,  9, 29 . 708842", Util.fixedFromGregorian(1941, 9, 29) == 708842, results);
+        test("1943,  4, 19 . 709409", Util.fixedFromGregorian(1943, 4, 19) == 709409, results);
+        test("1943,  10, 7 . 709580", Util.fixedFromGregorian(1943, 10, 7) == 709580, results);
+        test("1992,  3, 17 . 727274", Util.fixedFromGregorian(1992, 3, 17) == 727274, results);
+        test("1996,  2, 25 . 728714", Util.fixedFromGregorian(1996, 2, 25) == 728714, results);
+        test("2038,  11, 10 . 744313", Util.fixedFromGregorian(2038, 11, 10) == 744313, results);
+        test("2094,  7, 18 . 764652", Util.fixedFromGregorian(2094, 7, 18) == 764652, results);
+
+        AMutableInt32 weekday = new AMutableInt32(0);
+        AMutableInt32 yearday = new AMutableInt32(0);
+        Util.dayNumbers(2005, 1, 1, weekday, yearday);
+        test("Jan 1, 2005.6, 0", weekday.getIntegerValue() == 6 && yearday.getIntegerValue() == 0, results);
+        Util.dayNumbers(2005, 1, 2, weekday, yearday);
+        test("Jan 2, 2005.6, 1", weekday.getIntegerValue() == 0 && yearday.getIntegerValue() == 1, results);
+        Util.dayNumbers(2005, 12, 30, weekday, yearday);
+        test("Dec 30, 2005.5, 363", weekday.getIntegerValue() == 5 && yearday.getIntegerValue() == 363, results);
+        Util.dayNumbers(2005, 12, 31, weekday, yearday);
+        test("Dec 31, 2005.6, 364", weekday.getIntegerValue() == 6 && yearday.getIntegerValue() == 364, results);
+        Util.dayNumbers(2004, 12, 31, weekday, yearday);
+        test("Dec 31, 2005.5, 365", weekday.getIntegerValue() == 5 && yearday.getIntegerValue() == 365, results);
+        return;
+    }
+
+    /*********************************************************************
+     * Function: print_version
+     * Purpose:
+     *********************************************************************/
+    public static void printVersion() {
+        AMutableString classadVersion = new AMutableString(null);
+        ClassAd.classAdLibraryVersion(classadVersion);
+        System.out.println("ClassAd Unit Tester v" + classadVersion + "\n");
+    }
+}


[17/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm
index 558b783..37eb1d4 100644
--- a/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm
+++ b/asterix-app/src/test/resources/runtimets/results/feeds/feeds_03/feeds_03.1.adm
@@ -1 +1 @@
-{ "DataverseName": "feeds", "FeedName": "TweetFeed", "Function": "feed_processor", "FeedType": "PRIMARY", "PrimaryTypeDetails": { "AdapterName": "file_feed", "AdapterConfiguration": {{ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } }} }, "SecondaryTypeDetails": null, "Timestamp": "Sat Jun 20 13:55:59 PDT 2015" }
+{ "DataverseName": "feeds", "FeedName": "TweetFeed", "Function": "feed_processor", "FeedType": "PRIMARY", "PrimaryTypeDetails": { "AdapterName": "file_feed", "AdapterConfiguration": {{ { "Name": "output-type-name", "Value": "TweetType" }, { "Name": "fs", "Value": "localfs" }, { "Name": "path", "Value": "asterix_nc1://data/twitter/obamatweets.adm" }, { "Name": "format", "Value": "adm" }, { "Name": "tuple-interval", "Value": "10" } }} }, "SecondaryTypeDetails": null, "Timestamp": "Sat Jun 20 13:55:59 PDT 2015" }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/testsuite.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/testsuite.xml b/asterix-app/src/test/resources/runtimets/testsuite.xml
index 910d29a..a506bd3 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite.xml
@@ -29,12 +29,27 @@
         QueryOffsetPath="queries"
         QueryFileExtension=".aql">
     <test-group name="feeds">
+        <!-- Fails constantly and it is not clear what is intended
+        <test-case FilePath="feeds">
+          <compilation-unit name="feeds_06">
+            <output-dir compare="Text">feeds_06</output-dir>
+          </compilation-unit>
+        </test-case> -->
+        <test-case FilePath="feeds">
+            <compilation-unit name="feed-with-external-parser">
+                <output-dir compare="Text">feed-with-external-parser</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="feeds">
+            <compilation-unit name="feeds_07">
+                <output-dir compare="Text">feeds_07</output-dir>
+            </compilation-unit>
+        </test-case>
         <test-case FilePath="feeds">
             <compilation-unit name="feeds_01">
                 <output-dir compare="Text">feeds_01</output-dir>
             </compilation-unit>
         </test-case>
-        <!--Disable it because of sporadic failures. Abdullah will re-enable it.
         <test-case FilePath="feeds">
             <compilation-unit name="feeds_02">
                 <output-dir compare="Text">feeds_02</output-dir>
@@ -45,23 +60,13 @@
                 <output-dir compare="Text">feeds_03</output-dir>
             </compilation-unit>
         </test-case>
+        <!-- Fails constantly
         <test-case FilePath="feeds">
             <compilation-unit name="feeds_04">
                 <output-dir compare="Text">feeds_04</output-dir>
             </compilation-unit>
         </test-case>
-
-        <test-case FilePath="feeds">
-          <compilation-unit name="feeds_06">
-            <output-dir compare="Text">feeds_06</output-dir>
-          </compilation-unit>
-        </test-case>
-        <test-case FilePath="feeds">
-            <compilation-unit name="feeds_07">
-                <output-dir compare="Text">feeds_07</output-dir>
-            </compilation-unit>
-        </test-case>
-
+         -->
         <test-case FilePath="feeds">
             <compilation-unit name="feeds_08">
                 <output-dir compare="Text">feeds_08</output-dir>
@@ -95,14 +100,34 @@
                 <output-dir compare="Text">issue_230_feeds</output-dir>
             </compilation-unit>
         </test-case>
-
+<!-- 
         <test-case FilePath="feeds">
             <compilation-unit name="issue_711_feeds">
                 <output-dir compare="Text">issue_711_feeds</output-dir>
             </compilation-unit>
+        </test-case>  -->
+    </test-group>
+    <test-group name="external-library">
+        <test-case FilePath="external-library">
+            <compilation-unit name="typed_adapter">
+                <output-dir compare="Text">typed_adapter</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="external-library">
+            <compilation-unit name="classad-parser">
+                <output-dir compare="Text">classad-parser</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="external-library">
+            <compilation-unit name="classad-parser2">
+                <output-dir compare="Text">classad-parser2</output-dir>
+            </compilation-unit>
+        </test-case>
+        <test-case FilePath="external-library">
+            <compilation-unit name="getCapital">
+                <output-dir compare="Text">getCapital</output-dir>
+            </compilation-unit>
         </test-case>
-        -->
-
     </test-group>
     <test-group name="flwor">
         <test-case FilePath="flwor">
@@ -6055,13 +6080,13 @@
         <test-case FilePath="load">
             <compilation-unit name="issue14_query">
                 <output-dir compare="Text">issue14_query</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified format parameter for local file system adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">
             <compilation-unit name="issue315_query">
                 <output-dir compare="Text">none</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified format parameter for local file system adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml b/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
index 7f3fa95..2965883 100644
--- a/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
+++ b/asterix-app/src/test/resources/runtimets/testsuite_sqlpp.xml
@@ -5771,13 +5771,13 @@
         <test-case FilePath="load">
             <compilation-unit name="issue14_query">
                 <output-dir compare="Text">issue14_query</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified format parameter for local file system adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">
             <compilation-unit name="issue315_query">
                 <output-dir compare="Text">none</output-dir>
-                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified format parameter for local file system adapter</expected-error>
+                <expected-error>org.apache.asterix.common.exceptions.AsterixException: Unspecified ("reader" or "format") parameter for local filesystem adapter</expected-error>
             </compilation-unit>
         </test-case>
         <test-case FilePath="load">

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-common/src/test/java/org/apache/asterix/test/aql/ITestLibrarian.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/test/java/org/apache/asterix/test/aql/ITestLibrarian.java b/asterix-common/src/test/java/org/apache/asterix/test/aql/ITestLibrarian.java
new file mode 100644
index 0000000..bbbd14e
--- /dev/null
+++ b/asterix-common/src/test/java/org/apache/asterix/test/aql/ITestLibrarian.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.test.aql;
+
+import java.io.IOException;
+import java.rmi.RemoteException;
+
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.exceptions.AsterixException;
+
+public interface ITestLibrarian {
+    public void install(String dvName, String libName, String libPath) throws IOException, Exception;
+
+    public void uninstall(String dvName, String libName) throws RemoteException, AsterixException, ACIDException;
+}
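
The ITestLibrarian contract above is what a test driver plugs into the TestExecutor (see the setLibrarian(...) hook in the next diff). A minimal sketch of an implementation, assuming a hypothetical logging-only stub rather than a real deployment path, could look like:

    package org.apache.asterix.test.aql;

    // A minimal sketch of an ITestLibrarian, assuming the interface introduced above.
    // A real librarian would deploy the external library on the cluster; this
    // hypothetical stub only records the requests so tests can be dry-run.
    public class LoggingTestLibrarian implements ITestLibrarian {

        @Override
        public void install(String dvName, String libName, String libPath) throws Exception {
            // A real implementation would unpack libPath and register the library's
            // functions, parsers, and adapters under the given dataverse.
            System.out.println("install " + dvName + "." + libName + " from " + libPath);
        }

        @Override
        public void uninstall(String dvName, String libName) {
            System.out.println("uninstall " + dvName + "." + libName);
        }
    }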

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
----------------------------------------------------------------------
diff --git a/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java b/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
index ae8c9cb..796be82 100644
--- a/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
+++ b/asterix-common/src/test/java/org/apache/asterix/test/aql/TestExecutor.java
@@ -64,6 +64,9 @@ import org.json.JSONObject;
 
 public class TestExecutor {
 
+    /*
+     * Static variables
+     */
     protected static final Logger LOGGER = Logger.getLogger(TestExecutor.class.getName());
     // see
     // https://stackoverflow.com/questions/417142/what-is-the-maximum-length-of-a-url-in-different-browsers/417184
@@ -71,8 +74,12 @@ public class TestExecutor {
     private static Method managixExecuteMethod = null;
     private static final HashMap<Integer, ITestServer> runningTestServers = new HashMap<>();
 
+    /*
+     * Instance members
+     */
     private String host;
     private int port;
+    private ITestLibrarian librarian;
 
     public TestExecutor() {
         host = "127.0.0.1";
@@ -84,6 +91,10 @@ public class TestExecutor {
         this.port = port;
     }
 
+    public void setLibrarian(ITestLibrarian librarian) {
+        this.librarian = librarian;
+    }
+
     /**
      * Probably does not work well with symlinks.
      */
@@ -127,11 +138,9 @@ public class TestExecutor {
                 ++num;
             }
             lineActual = readerActual.readLine();
-            // Assert.assertEquals(null, lineActual);
             if (lineActual != null) {
                 throw new Exception("Result for " + scriptFile + " changed at line " + num + ":\n< \n> " + lineActual);
             }
-            // actualFile.delete();
         } finally {
             readerExpected.close();
             readerActual.close();
@@ -581,7 +590,8 @@ public class TestExecutor {
                             }
                             break;
                         case "sleep":
-                            Thread.sleep(Long.parseLong(statement.trim()));
+                            String[] lines = statement.split("\n");
+                            Thread.sleep(Long.parseLong(lines[lines.length - 1].trim()));
                             break;
                         case "errddl": // a ddlquery that expects error
                             try {
@@ -641,7 +651,7 @@ public class TestExecutor {
                         case "server": // (start <test server name> <port>
                                        // [<arg1>][<arg2>][<arg3>]...|stop (<port>|all))
                             try {
-                                String[] lines = statement.trim().split("\n");
+                                lines = statement.trim().split("\n");
                                 String[] command = lines[lines.length - 1].trim().split(" ");
                                 if (command.length < 2) {
                                     throw new Exception("invalid server command format. expected format ="
@@ -686,6 +696,36 @@ public class TestExecutor {
                                 throw new Exception("Test \"" + testFile + "\" FAILED!\n", e);
                             }
                             break;
+                        case "lib": // expected format <dataverse-name> <library-name>
+                                    // <library-directory>
+                            // TODO: make this case work well with entity names containing spaces by
+                            // looking for \"
+                            lines = statement.split("\n");
+                            String lastLine = lines[lines.length - 1];
+                            String[] command = lastLine.trim().split(" ");
+                            if (command.length < 3) {
+                                throw new Exception("invalid library format");
+                            }
+                            String dataverse = command[1];
+                            String library = command[2];
+                            switch (command[0]) {
+                                case "install":
+                                    if (command.length != 4) {
+                                        throw new Exception("invalid library format");
+                                    }
+                                    String libPath = command[3];
+                                    librarian.install(dataverse, library, libPath);
+                                    break;
+                                case "uninstall":
+                                    if (command.length != 3) {
+                                        throw new Exception("invalid library format");
+                                    }
+                                    librarian.uninstall(dataverse, library);
+                                    break;
+                                default:
+                                    throw new Exception("invalid library format");
+                            }
+                            break;
                         default:
                             throw new IllegalArgumentException("No statements of type " + ctx.getType());
                     }
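
The new "lib" statement type above lets a test script drive the librarian directly. As a hedged illustration (the dataverse and library names below are made up), the last line of such a statement would be one of:

    install externallibtest testlib <library-directory>
    uninstall externallibtest testlib

The executor splits that line on spaces: install requires exactly four tokens (command, dataverse, library, directory) and uninstall exactly three; anything else is rejected with "invalid library format" and the librarian registered via setLibrarian(...) performs the actual work.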

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
index e957ac6..d5b1c6e 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/adapter/factory/GenericAdapterFactory.java
@@ -35,9 +35,12 @@ import org.apache.asterix.external.provider.DatasourceFactoryProvider;
 import org.apache.asterix.external.provider.ParserFactoryProvider;
 import org.apache.asterix.external.util.ExternalDataCompatibilityUtils;
 import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.external.util.FeedUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.dataflow.std.file.FileSplit;
 
 public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterFactory {
 
@@ -48,6 +51,8 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
     private Map<String, String> configuration;
     private List<ExternalFile> files;
     private boolean indexingOp;
+    private boolean isFeed;
+    private FileSplit[] feedLogFileSplits;
 
     @Override
     public void setSnapshot(List<ExternalFile> files, boolean indexingOp) {
@@ -69,12 +74,30 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
      * Runs on each node controller (after serialization-deserialization)
      */
     @Override
-    public IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+    public synchronized IDataSourceAdapter createAdapter(IHyracksTaskContext ctx, int partition) throws Exception {
+        restoreExternalObjects();
         IDataFlowController controller = DataflowControllerProvider.getDataflowController(recordType, ctx, partition,
-                dataSourceFactory, dataParserFactory, configuration, indexingOp);
+                dataSourceFactory, dataParserFactory, configuration, indexingOp, isFeed, feedLogFileSplits);
         return new GenericAdapter(controller);
     }
 
+    private void restoreExternalObjects() throws Exception {
+        if (dataSourceFactory == null) {
+            dataSourceFactory = DatasourceFactoryProvider.getExternalDataSourceFactory(configuration);
+            // create and configure parser factory
+            if (dataSourceFactory.isIndexible() && (files != null)) {
+                ((IIndexibleExternalDataSource) dataSourceFactory).setSnapshot(files, indexingOp);
+            }
+            dataSourceFactory.configure(configuration);
+        }
+        if (dataParserFactory == null) {
+            // create and configure parser factory
+            dataParserFactory = ParserFactoryProvider.getDataParserFactory(configuration);
+            dataParserFactory.setRecordType(recordType);
+            dataParserFactory.configure(configuration);
+        }
+    }
+
     @Override
     public void configure(Map<String, String> configuration, ARecordType outputType) throws Exception {
         this.recordType = outputType;
@@ -83,6 +106,25 @@ public class GenericAdapterFactory implements IIndexingAdapterFactory, IAdapterF
         dataParserFactory = ParserFactoryProvider.getDataParserFactory(configuration);
         prepare();
         ExternalDataCompatibilityUtils.validateCompatibility(dataSourceFactory, dataParserFactory);
+        configureFeedLogManager();
+        nullifyExternalObjects();
+    }
+
+    private void configureFeedLogManager() throws Exception {
+        this.isFeed = ExternalDataUtils.isFeed(configuration);
+        if (isFeed) {
+            feedLogFileSplits = FeedUtils.splitsForAdapter(ExternalDataUtils.getDataverse(configuration),
+                    ExternalDataUtils.getFeedName(configuration), dataSourceFactory.getPartitionConstraint());
+        }
+    }
+
+    private void nullifyExternalObjects() {
+        if (ExternalDataUtils.isExternal(configuration.get(ExternalDataConstants.KEY_READER))) {
+            dataSourceFactory = null;
+        }
+        if (ExternalDataUtils.isExternal(configuration.get(ExternalDataConstants.KEY_PARSER))) {
+            dataParserFactory = null;
+        }
     }
 
     private void prepare() throws Exception {
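
A note on the restore/nullify pair above: factories that may come from an external library are nulled out in configure(...) before the job is serialized, and re-created from the stored configuration on each node controller in createAdapter(...). A condensed sketch of that lazy-restore idiom, using hypothetical names that are not part of the AsterixDB API, is:

    import java.io.Serializable;
    import java.util.Map;

    // Hypothetical sketch of the nullify-then-restore idiom used by the factory above:
    // an object that cannot travel with the serialized job is dropped before shipping
    // and rebuilt from the configuration the first time it is needed.
    public class LazyFactoryHolder implements Serializable {
        private static final long serialVersionUID = 1L;

        private final Map<String, String> configuration;
        private transient Object factory; // deliberately not serialized

        public LazyFactoryHolder(Map<String, String> configuration) {
            this.configuration = configuration;
        }

        public synchronized Object getFactory() {
            if (factory == null) {
                // re-created on the node controller, mirroring restoreExternalObjects()
                factory = createFromConfiguration(configuration);
            }
            return factory;
        }

        private Object createFromConfiguration(Map<String, String> conf) {
            // placeholder for a provider lookup keyed on, e.g., "reader" or "parser"
            return new Object();
        }
    }

The transient field plays the role of the nulled-out factory; the configuration map is the only state that travels with the job.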

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
index 3dabb29..8cc4e27 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IInputStreamProvider.java
@@ -18,8 +18,15 @@
  */
 package org.apache.asterix.external.api;
 
+import java.util.Map;
+
 import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.asterix.external.util.FeedLogManager;
 
 public interface IInputStreamProvider {
     public AInputStream getInputStream() throws Exception;
+
+    public void configure(Map<String, String> configuration);
+
+    public void setFeedLogManager(FeedLogManager feedLogManager);
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordFlowController.java
deleted file mode 100644
index c3bdc56..0000000
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordFlowController.java
+++ /dev/null
@@ -1,27 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.external.api;
-
-public interface IRecordFlowController<T> extends IDataFlowController {
-
-    public void setRecordParser(IRecordDataParser<T> dataParser);
-
-    public void setRecordReader(IRecordReader<T> recordReader) throws Exception;
-
-}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
index c422c44..1462e9b 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/api/IRecordReader.java
@@ -22,6 +22,8 @@ import java.io.Closeable;
 import java.io.IOException;
 import java.util.Map;
 
+import org.apache.asterix.external.util.FeedLogManager;
+
 /**
  * This interface represents a record reader that reads data from external source as a set of records
  * @param <T>
@@ -57,11 +59,17 @@ public interface IRecordReader<T> extends Closeable {
      */
     public boolean stop();
 
+    // TODO: Find a better way to do flushes; this doesn't fit here
     /**
      * set a pointer to the controller of the feed. the controller can be used to flush()
      * parsed records when waiting for more records to be pushed
      */
-    public default void setController(IDataFlowController controller) throws UnsupportedOperationException {
-        throw new UnsupportedOperationException();
-    };
-}
+    public void setController(IDataFlowController controller);
+
+    // TODO: Find a better way to perform logging. This doesn't fit here
+    /**
+     * set a pointer to the log manager of the feed. the log manager can be used to log
+     * progress and errors
+     */
+    public void setFeedLogManager(FeedLogManager feedLogManager);
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
index ef71769..0c58ee3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/AbstractFeedDataFlowController.java
@@ -20,8 +20,12 @@ package org.apache.asterix.external.dataflow;
 
 import java.util.Map;
 
-import org.apache.asterix.external.api.ITupleForwarder;
+
+import javax.annotation.Nonnull;
+
 import org.apache.asterix.external.api.IDataFlowController;
+import org.apache.asterix.external.api.ITupleForwarder;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -33,6 +37,11 @@ public abstract class AbstractFeedDataFlowController implements IDataFlowControl
     protected Map<String, String> configuration;
     protected static final int NUMBER_OF_TUPLE_FIELDS = 1;
     protected ArrayTupleBuilder tb = new ArrayTupleBuilder(NUMBER_OF_TUPLE_FIELDS);
+    protected FeedLogManager feedLogManager;
+
+    public AbstractFeedDataFlowController(@Nonnull FeedLogManager feedLogManager) {
+        this.feedLogManager = feedLogManager;
+    }
 
     @Override
     public ITupleForwarder getTupleForwarder() {
@@ -45,6 +54,7 @@ public abstract class AbstractFeedDataFlowController implements IDataFlowControl
     }
 
     protected void initializeTupleForwarder(IFrameWriter writer) throws HyracksDataException {
+        tupleForwarder.configure(configuration);
         tupleForwarder.initialize(ctx, writer);
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
index 2a4eaf9..3408af9 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedRecordDataFlowController.java
@@ -20,25 +20,40 @@ package org.apache.asterix.external.dataflow;
 
 import java.util.concurrent.atomic.AtomicBoolean;
 
+import javax.annotation.Nonnull;
+
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
-import org.apache.asterix.external.api.IRecordFlowController;
 import org.apache.asterix.external.api.IRecordReader;
+import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataExceptionUtils;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
+import org.apache.log4j.Logger;
 
-public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowController
-        implements IRecordFlowController<T> {
-    protected IRecordDataParser<T> dataParser;
-    protected IRecordReader<? extends T> recordReader;
+public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowController {
+    private static final Logger LOGGER = Logger.getLogger(FeedRecordDataFlowController.class.getName());
+    protected final IRecordDataParser<T> dataParser;
+    protected final IRecordReader<? extends T> recordReader;
+    protected final AtomicBoolean closed = new AtomicBoolean(false);
     protected long interval;
-    protected AtomicBoolean closed = new AtomicBoolean(false);
+    protected boolean failed = false;
+
+    public FeedRecordDataFlowController(@Nonnull FeedLogManager feedLogManager,
+            @Nonnull IRecordDataParser<T> dataParser, @Nonnull IRecordReader<T> recordReader) {
+        super(feedLogManager);
+        this.dataParser = dataParser;
+        this.recordReader = recordReader;
+        recordReader.setFeedLogManager(feedLogManager);
+        recordReader.setController(this);
+    }
 
     @Override
     public void start(IFrameWriter writer) throws HyracksDataException {
         HyracksDataException hde = null;
         try {
+            failed = false;
             initializeTupleForwarder(writer);
             while (recordReader.hasNext()) {
                 IRawRecord<? extends T> record = recordReader.next();
@@ -50,10 +65,18 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
                 tb.reset();
                 dataParser.parse(record, tb.getDataOutput());
                 tb.addFieldEndOffset();
+                if (tb.getSize() > tupleForwarder.getMaxRecordSize()) {
+                    // record is too large to forward; log it and skip to the next one
+                    feedLogManager.logRecord(record.toString(), ExternalDataConstants.LARGE_RECORD_ERROR_MESSAGE);
+                    continue;
+                }
                 tupleForwarder.addTuple(tb);
             }
         } catch (Throwable th) {
-            hde = new HyracksDataException(th);
+            failed = true;
+            tupleForwarder.flush();
+            LOGGER.warn("Failure during while operating a feed source", th);
+            throw new HyracksDataException(th);
         }
         try {
             tupleForwarder.close();
@@ -63,10 +86,13 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
         try {
             recordReader.close();
         } catch (Throwable th) {
+            LOGGER.warn("Failure during while operating a feed sourcec", th);
             hde = ExternalDataExceptionUtils.suppress(hde, th);
-            throw hde;
         } finally {
             closeSignal();
+            if (hde != null) {
+                throw hde;
+            }
         }
     }
 
@@ -87,11 +113,29 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
 
     @Override
     public boolean stop() throws HyracksDataException {
+        HyracksDataException hde = null;
         if (recordReader.stop()) {
-            try {
-                waitForSignal();
-            } catch (InterruptedException e) {
-                throw new HyracksDataException(e);
+            if (failed) {
+                // failed, close here
+                try {
+                    tupleForwarder.close();
+                } catch (Throwable th) {
+                    hde = ExternalDataExceptionUtils.suppress(hde, th);
+                }
+                try {
+                    recordReader.close();
+                } catch (Throwable th) {
+                    hde = ExternalDataExceptionUtils.suppress(hde, th);
+                }
+                if (hde != null) {
+                    throw hde;
+                }
+            } else {
+                try {
+                    waitForSignal();
+                } catch (InterruptedException e) {
+                    throw new HyracksDataException(e);
+                }
             }
             return true;
         }
@@ -102,15 +146,4 @@ public class FeedRecordDataFlowController<T> extends AbstractFeedDataFlowControl
     public boolean handleException(Throwable th) {
         return true;
     }
-
-    @Override
-    public void setRecordParser(IRecordDataParser<T> dataParser) {
-        this.dataParser = dataParser;
-    }
-
-    @Override
-    public void setRecordReader(IRecordReader<T> recordReader) {
-        this.recordReader = recordReader;
-        recordReader.setController(this);
-    }
 }
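
The shutdown logic above closes the tuple forwarder and the record reader even when one of them fails, collecting exceptions and rethrowing only at the end. A self-contained sketch of that suppress-and-rethrow pattern, written against plain java.io.Closeable rather than the project's ExternalDataExceptionUtils, is:

    import java.io.Closeable;
    import java.io.IOException;

    // Sketch of the close-everything-then-rethrow pattern used in the controller above.
    // Each resource is closed even if an earlier close failed; the first exception is
    // kept and later ones are attached as suppressed exceptions.
    public final class CloseAll {

        private CloseAll() {
        }

        public static void closeAll(Closeable... resources) throws IOException {
            IOException first = null;
            for (Closeable resource : resources) {
                try {
                    resource.close();
                } catch (IOException e) {
                    if (first == null) {
                        first = e;
                    } else {
                        first.addSuppressed(e);
                    }
                }
            }
            if (first != null) {
                throw first; // rethrow only after every resource was attempted
            }
        }
    }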

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
index 4ef5f6d..580e350 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedStreamDataFlowController.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.dataflow;
 import org.apache.asterix.external.api.IStreamDataParser;
 import org.apache.asterix.external.api.IStreamFlowController;
 import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 
@@ -29,6 +30,10 @@ public class FeedStreamDataFlowController extends AbstractFeedDataFlowController
     private IStreamDataParser dataParser;
     private AInputStream stream;
 
+    public FeedStreamDataFlowController(FeedLogManager feedLogManager) {
+        super(feedLogManager);
+    }
+
     @Override
     public void start(IFrameWriter writer) throws HyracksDataException {
         try {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
index e728fab..926022c 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/FeedTupleForwarder.java
@@ -18,11 +18,17 @@
  */
 package org.apache.asterix.external.dataflow;
 
+import java.io.IOException;
 import java.nio.ByteBuffer;
 import java.util.Map;
 
+import javax.annotation.Nonnull;
+
+import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
 import org.apache.asterix.external.api.ITupleForwarder;
 import org.apache.asterix.external.util.DataflowUtils;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.asterix.external.util.FeedMessageUtils;
 import org.apache.hyracks.api.comm.IFrame;
 import org.apache.hyracks.api.comm.IFrameWriter;
@@ -35,10 +41,21 @@ import org.apache.hyracks.dataflow.common.comm.util.FrameUtils;
 
 public class FeedTupleForwarder implements ITupleForwarder {
 
+    private int maxRecordSize; // temporary limit until big-object support in storage is implemented
     private FrameTupleAppender appender;
     private IFrame frame;
     private IFrameWriter writer;
     private boolean paused = false;
+    private final FeedLogManager feedLogManager;
+    private boolean initialized;
+
+    public FeedTupleForwarder(@Nonnull FeedLogManager feedLogManager) {
+        this.feedLogManager = feedLogManager;
+    }
+
+    public FeedLogManager getFeedLogManager() {
+        return feedLogManager;
+    }
 
     @Override
     public void configure(Map<String, String> configuration) {
@@ -46,18 +63,30 @@ public class FeedTupleForwarder implements ITupleForwarder {
 
     @Override
     public void initialize(IHyracksTaskContext ctx, IFrameWriter writer) throws HyracksDataException {
-        this.frame = new VSizeFrame(ctx);
-        this.writer = writer;
-        this.appender = new FrameTupleAppender(frame);
-        // Set null feed message
-        ByteBuffer message = (ByteBuffer) ctx.getSharedObject();
-        // a null message
-        message.put(FeedMessageUtils.NULL_FEED_MESSAGE);
-        message.flip();
+        if (!initialized) {
+            this.maxRecordSize = ((IAsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()
+                    .getApplicationObject()).getBufferCache().getPageSize() / 2;
+            this.frame = new VSizeFrame(ctx);
+            this.writer = writer;
+            this.appender = new FrameTupleAppender(frame);
+            // Set null feed message
+            ByteBuffer message = (ByteBuffer) ctx.getSharedObject();
+            // a null message
+            message.put(FeedMessageUtils.NULL_FEED_MESSAGE);
+            message.flip();
+            initialized = true;
+        }
     }
 
     @Override
     public void addTuple(ArrayTupleBuilder tb) throws HyracksDataException {
+        if (tb.getSize() > maxRecordSize) {
+            try {
+                feedLogManager.logRecord(tb.toString(), ExternalDataConstants.LARGE_RECORD_ERROR_MESSAGE);
+            } catch (IOException e) {
+                throw new HyracksDataException(e);
+            }
+        }
         if (paused) {
             synchronized (this) {
                 while (paused) {
@@ -86,9 +115,18 @@ public class FeedTupleForwarder implements ITupleForwarder {
         if (appender.getTupleCount() > 0) {
             FrameUtils.flushFrame(frame.getBuffer(), writer);
         }
+        try {
+            feedLogManager.close();
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
     }
 
     public void flush() throws HyracksDataException {
         appender.flush(writer);
     }
+
+    public int getMaxRecordSize() {
+        return maxRecordSize;
+    }
 }
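
The forwarder above now initializes its frame state only once and logs any record larger than half a buffer-cache page before forwarding it. A rough sketch of those two guards, using hypothetical stand-in names (ForwarderGuardExample is not part of the commit):

public class ForwarderGuardExample {
    private boolean initialized;
    private int maxRecordSize;

    void initialize(int pageSize) {
        if (!initialized) {             // safe to call more than once
            maxRecordSize = pageSize / 2;
            initialized = true;
        }
    }

    void addRecord(String record) {
        if (record.length() > maxRecordSize) {
            // the real code hands the oversized record to a FeedLogManager instead of printing it
            System.err.println("oversized record (" + record.length() + " > " + maxRecordSize + ")");
        }
        // ... forward the record downstream ...
    }
}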

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
index 68c6f9b..ffa025b 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/IndexingDataFlowController.java
@@ -18,23 +18,26 @@
  */
 package org.apache.asterix.external.dataflow;
 
+import javax.annotation.Nonnull;
+
 import org.apache.asterix.external.api.IExternalIndexer;
 import org.apache.asterix.external.api.IIndexingDatasource;
+import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
 public class IndexingDataFlowController<T> extends RecordDataFlowController<T> {
-    IExternalIndexer indexer;
+    private final IExternalIndexer indexer;
 
-    @Override
-    protected void appendOtherTupleFields(ArrayTupleBuilder tb) throws Exception {
-        indexer.index(tb);
+    public IndexingDataFlowController(@Nonnull IRecordDataParser<T> dataParser,
+            @Nonnull IRecordReader<? extends T> recordReader) throws Exception {
+        super(dataParser, recordReader);
+        indexer = ((IIndexingDatasource) recordReader).getIndexer();
+        numOfTupleFields += indexer.getNumberOfFields();
     }
 
     @Override
-    public void setRecordReader(IRecordReader<T> recordReader) throws Exception {
-        super.setRecordReader(recordReader);
-        indexer = ((IIndexingDatasource) recordReader).getIndexer();
-        numOfTupleFields += indexer.getNumberOfFields();
+    protected void appendOtherTupleFields(ArrayTupleBuilder tb) throws Exception {
+        indexer.index(tb);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
index 9353a40..57f0f3d 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/dataflow/RecordDataFlowController.java
@@ -18,20 +18,27 @@
  */
 package org.apache.asterix.external.dataflow;
 
+import javax.annotation.Nonnull;
+
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordDataParser;
-import org.apache.asterix.external.api.IRecordFlowController;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.hyracks.api.comm.IFrameWriter;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.ArrayTupleBuilder;
 
-public class RecordDataFlowController<T> extends AbstractDataFlowController implements IRecordFlowController<T> {
+public class RecordDataFlowController<T> extends AbstractDataFlowController {
 
-    protected IRecordDataParser<T> dataParser;
-    protected IRecordReader<? extends T> recordReader;
+    protected final IRecordDataParser<T> dataParser;
+    protected final IRecordReader<? extends T> recordReader;
     protected int numOfTupleFields = 1;
 
+    public RecordDataFlowController(@Nonnull IRecordDataParser<T> dataParser,
+            @Nonnull IRecordReader<? extends T> recordReader) {
+        this.dataParser = dataParser;
+        this.recordReader = recordReader;
+    }
+
     @Override
     public void start(IFrameWriter writer) throws HyracksDataException {
         try {
@@ -66,16 +73,6 @@ public class RecordDataFlowController<T> extends AbstractDataFlowController impl
     }
 
     @Override
-    public void setRecordParser(IRecordDataParser<T> dataParser) {
-        this.dataParser = dataParser;
-    }
-
-    @Override
-    public void setRecordReader(IRecordReader<T> recordReader) throws Exception {
-        this.recordReader = recordReader;
-    }
-
-    @Override
     public boolean pause() throws HyracksDataException {
         return false;
     }
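
The controller above switches from setter injection to constructor injection, so the parser and reader can be declared final and are guaranteed to be present once the object exists. A generic sketch of that refactoring shape, with hypothetical Parser and Reader stand-ins rather than the commit's interfaces:

public class ConstructorInjectionExample {
    interface Parser { void parse(String record); }
    interface Reader { String next(); }

    static class Controller {
        private final Parser parser;   // can be final: assigned exactly once in the constructor
        private final Reader reader;

        Controller(Parser parser, Reader reader) {
            this.parser = parser;
            this.reader = reader;
        }

        void step() {
            parser.parse(reader.next());
        }
    }
}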

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
index a00e732..c4f7543 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/feed/dataflow/FeedRuntimeInputHandler.java
@@ -82,13 +82,6 @@ public class FeedRuntimeInputHandler implements IFrameWriter {
     private FrameEventCallback frameEventCallback;
 
     public FeedRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
-            IFrameWriter coreOperator, FeedPolicyAccessor fpa, FrameTupleAccessor fta, RecordDescriptor recordDesc,
-            IFeedManager feedManager, int nPartitions) throws HyracksDataException {
-        this(ctx, connectionId, runtimeId, coreOperator, fpa, fpa.bufferingEnabled(), fta, recordDesc, feedManager,
-                nPartitions);
-    }
-
-    public FeedRuntimeInputHandler(IHyracksTaskContext ctx, FeedConnectionId connectionId, FeedRuntimeId runtimeId,
             IFrameWriter coreOperator, FeedPolicyAccessor fpa, boolean bufferingEnabled, FrameTupleAccessor fta,
             RecordDescriptor recordDesc, IFeedManager feedManager, int nPartitions) throws HyracksDataException {
         this.connectionId = connectionId;
@@ -281,7 +274,12 @@ public class FeedRuntimeInputHandler implements IFrameWriter {
                             bucket.setContentType(ContentType.DATA);
                         } else {
                             bucket.setContentType(ContentType.EOD);
+                            setFinished(true);
+                            synchronized (coreOperator) {
+                                coreOperator.notifyAll();
+                            }
                         }
+                        // TODO: fix handling of eod case with monitored buffers.
                         bucket.setDesiredReadCount(1);
                         mBuffer.sendMessage(bucket);
                         mBuffer.sendReport(frame);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java
index 895af1b..fe59aad 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/couchbase/CouchbaseReader.java
@@ -34,6 +34,7 @@ import org.apache.asterix.external.input.record.CharArrayRecord;
 import org.apache.asterix.external.input.record.GenericRecord;
 import org.apache.asterix.external.input.record.RecordWithMetadata;
 import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -80,7 +81,8 @@ public class CouchbaseReader implements IRecordReader<RecordWithMetadata<char[]>
     private CharsetDecoder decoder = StandardCharsets.UTF_8.newDecoder();
     private ByteBuffer bytes = ByteBuffer.allocateDirect(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
     private CharBuffer chars = CharBuffer.allocate(ExternalDataConstants.DEFAULT_BUFFER_SIZE);
-    // metaTypes = {key(string), bucket(string), vbucket(int32), seq(long), cas(long), creationTime(long),expiration(int32),flags(int32),revSeqNumber(long),lockTime(int32)}
+    // metaTypes = {key(string), bucket(string), vbucket(int32), seq(long), cas(long),
+    // creationTime(long),expiration(int32),flags(int32),revSeqNumber(long),lockTime(int32)}
     private static final IAType[] metaTypes = new IAType[] { BuiltinType.ASTRING, BuiltinType.ASTRING,
             BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT64, BuiltinType.AINT64, BuiltinType.AINT32,
             BuiltinType.AINT32, BuiltinType.AINT64, BuiltinType.AINT32 };
@@ -256,4 +258,8 @@ public class CouchbaseReader implements IRecordReader<RecordWithMetadata<char[]>
         }
         record.endRecord();
     }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
index 564d55a..b162a02 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/hdfs/HDFSRecordReader.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IExternalIndexer;
 import org.apache.asterix.external.api.IIndexingDatasource;
 import org.apache.asterix.external.api.IRawRecord;
@@ -29,6 +30,7 @@ import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.input.record.GenericRecord;
 import org.apache.asterix.external.input.record.reader.EmptyRecordReader;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -182,4 +184,12 @@ public class HDFSRecordReader<K, V extends Writable> implements IRecordReader<Wr
     public RecordReader<K, Writable> getReader() {
         return reader;
     }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
+
+    @Override
+    public void setController(IDataFlowController controller) {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
index 13cd26a..1af8695 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/rss/RSSRecordReader.java
@@ -26,9 +26,11 @@ import java.util.List;
 import java.util.Map;
 import java.util.Queue;
 
+import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.input.record.GenericRecord;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.log4j.Logger;
 
 import com.sun.syndication.feed.synd.SyndEntryImpl;
@@ -135,6 +137,14 @@ public class RSSRecordReader implements IRecordReader<SyndEntryImpl> {
             rssFeedBuffer.addAll(fetchedFeeds);
         }
     }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
+
+    @Override
+    public void setController(IDataFlowController controller) {
+    }
 }
 
 class FetcherEventListenerImpl implements FetcherListener {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
index 6225b82..2d6d8ea 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/AbstractStreamRecordReader.java
@@ -31,6 +31,7 @@ import org.apache.asterix.external.input.record.CharArrayRecord;
 import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.input.stream.AInputStreamReader;
 import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.FeedLogManager;
 
 public abstract class AbstractStreamRecordReader implements IRecordReader<char[]>, IIndexingDatasource {
     protected AInputStreamReader reader;
@@ -40,6 +41,7 @@ public abstract class AbstractStreamRecordReader implements IRecordReader<char[]
     protected int bufferPosn = 0;
     protected IExternalIndexer indexer;
     protected boolean done = false;
+    protected FeedLogManager feedLogManager;
 
     @Override
     public IRawRecord<char[]> next() throws IOException {
@@ -89,4 +91,10 @@ public abstract class AbstractStreamRecordReader implements IRecordReader<char[]
     public void setController(IDataFlowController controller) {
         reader.setController((AbstractFeedDataFlowController) controller);
     }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+        this.feedLogManager = feedLogManager;
+        reader.setFeedLogManager(feedLogManager);
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
new file mode 100644
index 0000000..ad2d90d
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReader.java
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.stream;
+
+import java.io.IOException;
+
+import org.apache.asterix.external.util.ExternalDataConstants;
+
+public class EmptyLineSeparatedRecordReader extends AbstractStreamRecordReader {
+
+    private boolean prevCharCR;
+    private boolean prevCharLF;
+    private int newlineLength;
+    private int recordNumber = 0;
+    private int readLength;
+
+    @Override
+    public boolean hasNext() throws IOException {
+        if (done) {
+            return false;
+        }
+        if (!skipWhiteSpace()) {
+            done = true;
+            close();
+            return false;
+        }
+        newlineLength = 0;
+        prevCharCR = false;
+        prevCharLF = false;
+        record.reset();
+        readLength = 0;
+        do {
+            int startPosn = bufferPosn; //starting from where we left off the last time
+            if (bufferPosn >= bufferLength) {
+                startPosn = bufferPosn = 0;
+                bufferLength = reader.read(inputBuffer);
+                if (bufferLength <= 0) {
+                    if (readLength > 0) {
+                        record.endRecord();
+                        recordNumber++;
+                        return true;
+                    }
+                    close();
+                    return false; //EOF
+                }
+            }
+            for (; bufferPosn < bufferLength; ++bufferPosn) { //search for two consecutive newlines
+                if (inputBuffer[bufferPosn] == ExternalDataConstants.LF) {
+                    if (prevCharLF) {
+                        // \n\n
+                        ++bufferPosn; // at next invocation proceed from following byte
+                        newlineLength = 2;
+                        break;
+                    } else if (prevCharCR) {
+                        newlineLength += 1;
+                    }
+                    prevCharLF = true;
+                } else {
+                    prevCharLF = false;
+                }
+                if (inputBuffer[bufferPosn] == ExternalDataConstants.CR) { //CR + notLF, we are at notLF
+                    if (prevCharCR) {
+                        // \cr\cr
+                        newlineLength = 2;
+                        break;
+                    }
+                    prevCharCR = true;
+                } else {
+                    prevCharCR = false;
+                }
+                if (!(prevCharCR || prevCharLF)) {
+                    newlineLength = 0;
+                }
+            }
+            readLength = bufferPosn - startPosn;
+            if (readLength > 0) {
+                record.append(inputBuffer, startPosn, readLength);
+            }
+        } while (newlineLength < 2);
+        recordNumber++;
+        record.endRecord();
+        return true;
+    }
+
+    private boolean skipWhiteSpace() throws IOException {
+        // start by skipping white spaces
+        while (true) {
+            if (bufferPosn < bufferLength) {
+                if (!Character.isWhitespace(inputBuffer[bufferPosn])) {
+                    return true;
+                }
+                bufferPosn++;
+            } else {
+                // fill buffer
+                bufferPosn = 0;
+                bufferLength = reader.read(inputBuffer);
+                if (bufferLength < 0) {
+                    return false;
+                }
+            }
+        }
+    }
+}
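
The reader above treats two consecutive line terminators as the end of a record, i.e. records are separated by an empty line. Roughly, that boundary corresponds to splitting the input on blank lines, as in this standalone sketch (not part of the commit; the sample strings are made up):

public class EmptyLineSplitExample {
    public static void main(String[] args) {
        String input = "{\"id\": 1}\n\n{\"id\": 2}\r\n\r\n{\"id\": 3}\n";
        // Two or more consecutive line terminators end a record.
        String[] records = input.split("(\\r?\\n){2,}");
        for (String record : records) {
            System.out.println("record: " + record.trim());
        }
    }
}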

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java
new file mode 100644
index 0000000..a1e8f31
--- /dev/null
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/EmptyLineSeparatedRecordReaderFactory.java
@@ -0,0 +1,45 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.input.record.reader.stream;
+
+import java.util.Map;
+
+import org.apache.asterix.external.api.IRecordReader;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
+
+public class EmptyLineSeparatedRecordReaderFactory extends AbstractStreamRecordReaderFactory<char[]> {
+
+    private static final long serialVersionUID = 1L;
+
+    @Override
+    public IRecordReader<char[]> createRecordReader(IHyracksTaskContext ctx, int partition) throws Exception {
+        EmptyLineSeparatedRecordReader recordReader = new EmptyLineSeparatedRecordReader();
+        return configureReader(recordReader, ctx, partition);
+    }
+
+    @Override
+    public Class<? extends char[]> getRecordClass() {
+        return char[].class;
+    }
+
+    @Override
+    protected void configureStreamReaderFactory(Map<String, String> configuration) throws Exception {
+    }
+
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
index ef0519e..d61dc5c 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/LineRecordReader.java
@@ -29,6 +29,7 @@ public class LineRecordReader extends AbstractStreamRecordReader {
     protected boolean prevCharCR;
     protected int newlineLength;
     protected int recordNumber = 0;
+    private boolean configured = false;
 
     @Override
     public boolean hasNext() throws IOException {
@@ -85,6 +86,7 @@ public class LineRecordReader extends AbstractStreamRecordReader {
             readLength = bufferPosn - startPosn;
             if (prevCharCR && newlineLength == 0) {
                 --readLength; //CR at the end of the buffer
+                prevCharCR = false;
             }
             if (readLength > 0) {
                 record.append(inputBuffer, startPosn, readLength);
@@ -96,11 +98,14 @@ public class LineRecordReader extends AbstractStreamRecordReader {
 
     @Override
     public void configure(Map<String, String> configuration) throws Exception {
-        super.configure(configuration);
-        if (ExternalDataUtils.hasHeader(configuration)) {
-            if (hasNext()) {
-                next();
+        if (!configured) {
+            super.configure(configuration);
+            if (ExternalDataUtils.hasHeader(configuration)) {
+                if (hasNext()) {
+                    next();
+                }
             }
         }
+        configured = true;
     }
 }
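
With the configured flag, the header is skipped exactly once even if configure() is invoked more than once. The header handling itself amounts to consuming one record before normal iteration begins; a minimal sketch with a plain BufferedReader (hypothetical names, not the commit's classes):

import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;

public class HeaderSkipExample {
    public static void main(String[] args) throws IOException {
        BufferedReader reader = new BufferedReader(new StringReader("id,name\n1,a\n2,b\n"));
        boolean hasHeader = true;   // would come from the adapter configuration
        if (hasHeader) {
            reader.readLine();      // discard the header line once, as configure() does
        }
        String line;
        while ((line = reader.readLine()) != null) {
            System.out.println("data record: " + line);
        }
    }
}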

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
index d469cb3..9b2d095 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/stream/SemiStructuredRecordReader.java
@@ -81,7 +81,7 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
             if (bufferPosn >= bufferLength) {
                 startPosn = bufferPosn = 0;
                 bufferLength = reader.read(inputBuffer);
-                if (bufferLength <= 0) {
+                if (bufferLength < 0) {
                     close();
                     return false; // EOF
                 }
@@ -99,7 +99,9 @@ public class SemiStructuredRecordReader extends AbstractStreamRecordReader {
                             && inputBuffer[bufferPosn] != ExternalDataConstants.LF
                             && inputBuffer[bufferPosn] != ExternalDataConstants.CR) {
                         // corrupted file. clear the buffer and stop reading
-                        reader.skipError();
+                        if (!reader.skipError()) {
+                            reader.close();
+                        }
                         bufferPosn = bufferLength = 0;
                         throw new IOException("Malformed input stream");
                     }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
index 084d6d0..617bc39 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPullRecordReader.java
@@ -22,9 +22,11 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.input.record.GenericRecord;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.asterix.external.util.TwitterUtil;
 import org.apache.asterix.external.util.TwitterUtil.SearchAPIConstants;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
@@ -97,4 +99,12 @@ public class TwitterPullRecordReader implements IRecordReader<Status> {
     public boolean stop() {
         return false;
     }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
+
+    @Override
+    public void setController(IDataFlowController controller) {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
index 764ac1d..19f156c 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/record/reader/twitter/TwitterPushRecordReader.java
@@ -22,9 +22,11 @@ import java.io.IOException;
 import java.util.Map;
 import java.util.concurrent.LinkedBlockingQueue;
 
+import org.apache.asterix.external.api.IDataFlowController;
 import org.apache.asterix.external.api.IRawRecord;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.input.record.GenericRecord;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.asterix.external.util.TwitterUtil;
 
 import twitter4j.FilterQuery;
@@ -123,4 +125,12 @@ public class TwitterPushRecordReader implements IRecordReader<Status> {
         public void onTrackLimitationNotice(int arg0) {
         }
     }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
+
+    @Override
+    public void setController(IDataFlowController controller) {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
index ce65249..469e866 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStream.java
@@ -18,16 +18,23 @@
  */
 package org.apache.asterix.external.input.stream;
 
+import java.io.IOException;
 import java.io.InputStream;
+import java.util.Map;
 
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.util.FeedLogManager;
 
 public abstract class AInputStream extends InputStream {
     public abstract boolean skipError() throws Exception;
 
     public abstract boolean stop() throws Exception;
 
-    public void setController(AbstractFeedDataFlowController controller) throws UnsupportedOperationException {
-        throw new UnsupportedOperationException();
-    }
+    public abstract void configure(Map<String, String> configuration) throws IOException;
+
+    // TODO: Find a better way to send notifications
+    public abstract void setController(AbstractFeedDataFlowController controller);
+
+    // TODO: Find a better way to send notifications
+    public abstract void setFeedLogManager(FeedLogManager logManager);
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
index 25418b0..e780c95 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/AInputStreamReader.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStreamReader;
 
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.util.FeedLogManager;
 
 public class AInputStreamReader extends InputStreamReader {
     private AInputStream in;
@@ -46,4 +47,8 @@ public class AInputStreamReader extends InputStreamReader {
     public void setController(AbstractFeedDataFlowController controller) {
         in.setController(controller);
     }
+
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+        in.setFeedLogManager(feedLogManager);
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
index aa7a3d8..5b654eb 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/BasicInputStream.java
@@ -20,6 +20,10 @@ package org.apache.asterix.external.input.stream;
 
 import java.io.IOException;
 import java.io.InputStream;
+import java.util.Map;
+
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.util.FeedLogManager;
 
 public class BasicInputStream extends AInputStream {
     private final InputStream in;
@@ -83,4 +87,16 @@ public class BasicInputStream extends AInputStream {
     public boolean stop() throws Exception {
         return false;
     }
+
+    @Override
+    public void configure(Map<String, String> configuration) {
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager logManager) {
+    }
+
+    @Override
+    public void setController(AbstractFeedDataFlowController controller) {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
index 7b7cd8b..8dcd5b6 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/LocalFileSystemInputStream.java
@@ -21,6 +21,7 @@ package org.apache.asterix.external.input.stream;
 import java.io.FileInputStream;
 import java.io.IOException;
 import java.nio.file.Path;
+import java.util.Map;
 
 import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.util.ExternalDataConstants;
@@ -33,10 +34,13 @@ public class LocalFileSystemInputStream extends AInputStream {
     private FileInputStream in;
     private byte lastByte;
 
-    public LocalFileSystemInputStream(Path inputResource, String expression, FeedLogManager logManager, boolean isFeed)
-            throws IOException {
-        this.watcher = new FileSystemWatcher(logManager, inputResource, expression, isFeed);
-        this.watcher.init();
+    public LocalFileSystemInputStream(Path inputResource, String expression, boolean isFeed) throws IOException {
+        this.watcher = new FileSystemWatcher(inputResource, expression, isFeed);
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager logManager) {
+        watcher.setFeedLogManager(logManager);
     }
 
     @Override
@@ -102,7 +106,8 @@ public class LocalFileSystemInputStream extends AInputStream {
         }
         int result = in.read(b, off, len);
         while (result < 0 && advance()) {
-            // return a new line at the end of every file <--Might create problems for some cases depending on the parser implementation-->
+            // return a new line at the end of every file <--Might create problems for some cases
+            // depending on the parser implementation-->
             if (lastByte != ExternalDataConstants.BYTE_LF && lastByte != ExternalDataConstants.BYTE_CR) {
                 lastByte = ExternalDataConstants.BYTE_LF;
                 b[off] = ExternalDataConstants.BYTE_LF;
@@ -128,4 +133,9 @@ public class LocalFileSystemInputStream extends AInputStream {
         watcher.close();
         return true;
     }
+
+    @Override
+    public void configure(Map<String, String> configuration) throws IOException {
+        watcher.init();
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java
index 2253a73..1e86f39 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/SocketInputStream.java
@@ -22,6 +22,10 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.net.ServerSocket;
 import java.net.Socket;
+import java.util.Map;
+
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
+import org.apache.asterix.external.util.FeedLogManager;
 
 public class SocketInputStream extends AInputStream {
     private ServerSocket server;
@@ -98,4 +102,16 @@ public class SocketInputStream extends AInputStream {
     public boolean stop() throws Exception {
         return false;
     }
+
+    @Override
+    public void configure(Map<String, String> configuration) {
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager logManager) {
+    }
+
+    @Override
+    public void setController(AbstractFeedDataFlowController controller) {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
index 06833af..3f70ce1 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/LocalFSInputStreamProviderFactory.java
@@ -56,8 +56,7 @@ public class LocalFSInputStreamProviderFactory implements IInputStreamProviderFa
 
     @Override
     public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition) throws Exception {
-        return new LocalFSInputStreamProvider(inputFileSplits, ctx, configuration, partition, expression, isFeed,
-                feedLogFileSplits);
+        return new LocalFSInputStreamProvider(inputFileSplits, ctx, configuration, partition, expression, isFeed);
     }
 
     @Override

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
index 5314981..484626a 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/factory/TwitterFirehoseStreamProviderFactory.java
@@ -24,6 +24,7 @@ import java.util.Map;
 
 import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
+import org.apache.asterix.external.input.stream.provider.TwitterFirehoseInputStreamProvider;
 import org.apache.asterix.om.util.AsterixClusterProperties;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
@@ -91,6 +92,6 @@ public class TwitterFirehoseStreamProviderFactory implements IInputStreamProvide
 
     @Override
     public IInputStreamProvider createInputStreamProvider(IHyracksTaskContext ctx, int partition) throws Exception {
-        return null;
+        return new TwitterFirehoseInputStreamProvider(configuration, ctx, partition);
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
index 9180345..bf9653d 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/HDFSInputStreamProvider.java
@@ -23,11 +23,13 @@ import java.util.List;
 import java.util.Map;
 
 import org.apache.asterix.external.api.IInputStreamProvider;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.indexing.ExternalFile;
 import org.apache.asterix.external.input.record.reader.hdfs.HDFSRecordReader;
 import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.provider.ExternalIndexerProvider;
 import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputSplit;
 import org.apache.hadoop.mapred.JobConf;
@@ -115,5 +117,21 @@ public class HDFSInputStreamProvider<K> extends HDFSRecordReader<K, Text> implem
         public boolean stop() throws Exception {
             return false;
         }
+
+        @Override
+        public void configure(Map<String, String> configuration) {
+        }
+
+        @Override
+        public void setFeedLogManager(FeedLogManager logManager) {
+        }
+
+        @Override
+        public void setController(AbstractFeedDataFlowController controller) {
+        }
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) {
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
index 4c4edd3..77520d4 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/LocalFSInputStreamProvider.java
@@ -18,7 +18,6 @@
  */
 package org.apache.asterix.external.input.stream.provider;
 
-import java.io.File;
 import java.io.IOException;
 import java.nio.file.Path;
 import java.util.Map;
@@ -27,7 +26,6 @@ import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.input.stream.LocalFileSystemInputStream;
 import org.apache.asterix.external.util.FeedLogManager;
-import org.apache.asterix.external.util.FeedUtils;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
 
@@ -36,29 +34,31 @@ public class LocalFSInputStreamProvider implements IInputStreamProvider {
     private String expression;
     private boolean isFeed;
     private Path path;
-    private File feedLogFile;
+    private FeedLogManager feedLogManager;
+    private Map<String, String> configuration;
 
     public LocalFSInputStreamProvider(FileSplit[] fileSplits, IHyracksTaskContext ctx,
-            Map<String, String> configuration, int partition, String expression, boolean isFeed,
-            FileSplit[] feedLogFileSplits) {
+            Map<String, String> configuration, int partition, String expression, boolean isFeed) {
         this.expression = expression;
         this.isFeed = isFeed;
         this.path = fileSplits[partition].getLocalFile().getFile().toPath();
-        if (feedLogFileSplits != null) {
-            this.feedLogFile = FeedUtils
-                    .getAbsoluteFileRef(feedLogFileSplits[partition].getLocalFile().getFile().getPath(),
-                            feedLogFileSplits[partition].getIODeviceId(), ctx.getIOManager())
-                    .getFile();
-
-        }
     }
 
     @Override
     public AInputStream getInputStream() throws IOException {
-        FeedLogManager feedLogManager = null;
-        if (isFeed && feedLogFile != null) {
-            feedLogManager = new FeedLogManager(feedLogFile);
-        }
-        return new LocalFileSystemInputStream(path, expression, feedLogManager, isFeed);
+        LocalFileSystemInputStream stream = new LocalFileSystemInputStream(path, expression, isFeed);
+        stream.setFeedLogManager(feedLogManager);
+        stream.configure(configuration);
+        return stream;
+    }
+
+    @Override
+    public void configure(Map<String, String> configuration) {
+        this.configuration = configuration;
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+        this.feedLogManager = feedLogManager;
     }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java
index 2b12675..b6da314 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/SocketInputStreamProvider.java
@@ -19,10 +19,12 @@
 package org.apache.asterix.external.input.stream.provider;
 
 import java.net.ServerSocket;
+import java.util.Map;
 
 import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.input.stream.SocketInputStream;
+import org.apache.asterix.external.util.FeedLogManager;
 
 public class SocketInputStreamProvider implements IInputStreamProvider {
     private ServerSocket server;
@@ -35,4 +37,12 @@ public class SocketInputStreamProvider implements IInputStreamProvider {
     public AInputStream getInputStream() throws Exception {
         return new SocketInputStream(server);
     }
+
+    @Override
+    public void configure(Map<String, String> configuration) {
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
 }



http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
new file mode 100644
index 0000000..431fa0c
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAd.java
@@ -0,0 +1,1565 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.Map;
+import java.util.Map.Entry;
+import java.util.TreeSet;
+
+import org.apache.asterix.external.classad.Value.NumberFactor;
+import org.apache.asterix.external.classad.object.pool.CaseInsensitiveStringPool;
+import org.apache.asterix.external.library.ClassAdParser;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.asterix.om.base.AMutableString;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class ClassAd extends ExprTree {
+
+    /*
+     * Static Variables
+     */
+    public static final int ERR_OK = 0;
+    public static final int ERR_MEM_ALLOC_FAILED = 1;
+    public static final int ERR_BAD_VALUE = 255;
+    public static final int ERR_FAILED_SET_VIEW_NAME = 256;
+    public static final int ERR_NO_RANK_EXPR = 257;
+    public static final int ERR_NO_REQUIREMENTS_EXPR = 258;
+    public static final int ERR_BAD_PARTITION_EXPRS = 259;
+    public static final int ERR_PARTITION_EXISTS = 260;
+    public static final int ERR_MISSING_ATTRNAME = 261;
+    public static final int ERR_BAD_EXPRESSION = 262;
+    public static final int ERR_INVALID_IDENTIFIER = 263;
+    public static final int ERR_MISSING_ATTRIBUTE = 264;
+    public static final int ERR_NO_SUCH_VIEW = 265;
+    public static final int ERR_VIEW_PRESENT = 266;
+    public static final int ERR_TRANSACTION_EXISTS = 267;
+    public static final int ERR_NO_SUCH_TRANSACTION = 268;
+    public static final int ERR_NO_REPRESENTATIVE = 269;
+    public static final int ERR_NO_PARENT_VIEW = 270;
+    public static final int ERR_BAD_VIEW_INFO = 271;
+    public static final int ERR_BAD_TRANSACTION_STATE = 272;
+    public static final int ERR_NO_SUCH_CLASSAD = 273;
+    public static final int ERR_BAD_CLASSAD = 275;
+    public static final int ERR_NO_KEY = 276;
+    public static final int ERR_LOG_OPEN_FAILED = 277;
+    public static final int ERR_BAD_LOG_FILENAME = 278;
+    public static final int ERR_NO_VIEW_NAME = 379;
+    public static final int ERR_RENAME_FAILED = 280;
+    public static final int ERR_NO_TRANSACTION_NAME = 281;
+    public static final int ERR_PARSE_ERROR = 282;
+    public static final int ERR_INTERNAL_CACHE_ERROR = 283;
+    public static final int ERR_FILE_WRITE_FAILED = 284;
+    public static final int ERR_FATAL_ERROR = 285;
+    public static final int ERR_CANNOT_CHANGE_MODE = 286;
+    public static final int ERR_CONNECT_FAILED = 287;
+    public static final int ERR_CLIENT_NOT_CONNECTED = 288;
+    public static final int ERR_COMMUNICATION_ERROR = 289;
+    public static final int ERR_BAD_CONNECTION_TYPE = 290;
+    public static final int ERR_BAD_SERVER_ACK = 291;
+    public static final int ERR_CANNOT_REPLACE = 292;
+    public static final int ERR_CACHE_SWITCH_ERROR = 293;
+    public static final int ERR_CACHE_FILE_ERROR = 294;
+    public static final int ERR_CACHE_CLASSAD_ERROR = 295;
+    public static final int ERR_CANT_LOAD_DYNAMIC_LIBRARY = 296;
+    public static final String ATTR_TOPLEVEL = "toplevel";
+    public static final String ATTR_ROOT = "root";
+    public static final String ATTR_SELF = "self";
+    public static final String ATTR_PARENT = "parent";
+    // The two names below are for compatibility
+    public static final String ATTR_MY = "my";
+    public static final String ATTR_CURRENT_TIME = "CurrentTime";
+    // These versions are actually taken from an external file in the original cpp source code
+    private static final int CLASSAD_VERSION_MAJOR = 8;
+    private static final int CLASSAD_VERSION_MINOR = 0;
+    private static final int CLASSAD_VERSION_PATCH = 0;
+    private static final String CLASSAD_VERSION = "8.0.0";
+    public static final ArrayList<String> specialAttrNames = new ArrayList<String>();
+    private final CaseInsensitiveStringPool StringPool = new CaseInsensitiveStringPool();
+
+    static {
+        specialAttrNames.add(ATTR_TOPLEVEL);
+        specialAttrNames.add(ATTR_ROOT);
+        specialAttrNames.add(ATTR_SELF);
+        specialAttrNames.add(ATTR_PARENT);
+    }
+
+    public static final FunctionCall curr_time_expr = FunctionCall.createFunctionCall("time", new ExprList());
+
+    private ClassAd alternateScope;
+    //private boolean doDirtyTracking;
+    private Map<CaseInsensitiveString, ExprTree> attrList = new HashMap<CaseInsensitiveString, ExprTree>();
+    private ClassAd chainedParentAd;
+    private ClassAdParser parser = null;
+    private ClassAd newAd = null;
+
+    /*
+     *  Constructors
+     */
+    public ClassAd() {
+        chainedParentAd = null;
+        alternateScope = null;
+        newAd = new ClassAd(false, false);
+        parser = new ClassAdParser();
+    }
+
+    public void configure(Map<String, String> configuration, ARecordType recordType) throws IOException {
+        parser.configure(configuration, recordType);
+    }
+
+    public ClassAd(boolean initializeParser, boolean initializeNewAd) {
+        chainedParentAd = null;
+        alternateScope = null;
+        if (initializeNewAd) {
+            newAd = new ClassAd(false, false);
+        }
+        if (initializeParser) {
+            parser = new ClassAdParser();
+        }
+    }
+
+    public ClassAd(ClassAd ad) throws HyracksDataException {
+        if (ad == null) {
+            clear();
+        } else {
+            copyFrom(ad);
+        }
+    }
+
+    @Override
+    public void reset() {
+        clear();
+    }
+
+    public boolean isReset() {
+        return false;
+    }
+
+    public ClassAd getAlternateScope() {
+        return alternateScope;
+    }
+
+    public void setAlternateScope(ClassAd alternateScope) {
+        this.alternateScope = alternateScope;
+    }
+
+    public Map<CaseInsensitiveString, ExprTree> getAttrList() {
+        return attrList;
+    }
+
+    public void setAttrList(Map<CaseInsensitiveString, ExprTree> attrList) {
+        this.attrList = attrList;
+    }
+
+    public void classAdLibraryVersion(AMutableInt32 major, AMutableInt32 minor, AMutableInt32 patch) {
+        major.setValue(CLASSAD_VERSION_MAJOR);
+        minor.setValue(CLASSAD_VERSION_MINOR);
+        patch.setValue(CLASSAD_VERSION_PATCH);
+    }
+
+    public static void classAdLibraryVersion(AMutableString version_string) {
+        version_string.setValue(CLASSAD_VERSION);
+    }
+
+    public static ArrayList<String> getSpecialAttrNames() {
+        return specialAttrNames;
+    }
+
+    public static FunctionCall getCurrentTimeExpr() {
+        return curr_time_expr;
+    }
+
+    //public TreeSet<CaseInsensitiveString> dirtyAttrList = new TreeSet<CaseInsensitiveString>();
+
+    /* Reference is an ordered set of Strings (ordered by case-insensitive comparison). Example:
+     *   TreeSet<String> references = new TreeSet<String>(
+     *           new Comparator<String>() {
+     *               public int compare(String o1, String o2) {
+     *                   return o1.compareToIgnoreCase(o2);
+     *               }
+     *           });
+     *
+     * PortReferences is a Map<ClassAd, OrderedSet<String>>
+     */
+
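+    // Copies the given ad into this one: the base-class scoping state, the
+    // chained parent ad and alternate scope (if present), and a deep copy of
+    // every attribute expression. Returns false if asked to copy an ad into itself.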
+    public boolean copyFrom(ClassAd ad) throws HyracksDataException {
+
+        boolean succeeded = true;
+        if (this == ad) {
+            succeeded = false;
+        } else {
+            clear();
+            // copy scoping attributes
+            super.copyFrom(ad);
+            if (ad.chainedParentAd != null) {
+                if (chainedParentAd == null) {
+                    chainedParentAd = new ClassAd();
+                }
+                chainedParentAd.setValue(ad.chainedParentAd);
+            }
+            if (ad.alternateScope != null) {
+                if (alternateScope == null) {
+                    alternateScope = new ClassAd();
+                }
+                alternateScope.setValue(ad.alternateScope);
+            }
+            //this.doDirtyTracking = false;
+            for (Entry<CaseInsensitiveString, ExprTree> attr : ad.attrList.entrySet()) {
+                ExprTree tree = attr.getValue().copy();
+                attrList.put(attr.getKey(), tree);
+                // if (ad.doDirtyTracking && ad.IsAttributeDirty(attr.getKey())) {
+                //   dirtyAttrList.add(attr.getKey());
+                //}
+            }
+            //doDirtyTracking = ad.doDirtyTracking;
+        }
+        return succeeded;
+    }
+
+    public boolean update(ClassAd ad) throws HyracksDataException {
+        for (Entry<CaseInsensitiveString, ExprTree> attr : ad.attrList.entrySet()) {
+            ExprTree tree = attr.getValue().copy();
+            attrList.put(attr.getKey(), tree);
+            // if (ad.doDirtyTracking && ad.IsAttributeDirty(attr.getKey())) {
+            //   dirtyAttrList.add(attr.getKey());
+            //}
+        }
+        return true;
+    }
+
+    public boolean updateFromChain(ClassAd ad) throws HyracksDataException {
+        ClassAd parent = ad.chainedParentAd;
+        if (parent != null) {
+            if (!updateFromChain(parent)) {
+                return false;
+            }
+        }
+        return update(ad);
+    }
+
+    public boolean copyFromChain(ClassAd ad) throws HyracksDataException {
+        if (this == ad) {
+            return false;
+        }
+        clear();
+        super.copyFrom(ad);
+        return updateFromChain(ad);
+    }
+
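+    // Two ClassAds are the same when they hold the same number of attributes and,
+    // for every attribute in this ad, the other ad has an attribute of the same
+    // (case-insensitive) name whose expression compares as sameAs.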
+    @Override
+    public boolean sameAs(ExprTree tree) {
+        boolean is_same;
+        ExprTree pSelfTree = tree.self();
+
+        if (this == pSelfTree) {
+            is_same = true;
+        } else if (pSelfTree.getKind() != NodeKind.CLASSAD_NODE) {
+            is_same = false;
+        } else {
+            ClassAd other_classad;
+            other_classad = (ClassAd) pSelfTree;
+
+            if (attrList.size() != other_classad.attrList.size()) {
+                is_same = false;
+            } else {
+                is_same = true;
+
+                for (Entry<CaseInsensitiveString, ExprTree> attr : attrList.entrySet()) {
+                    ExprTree this_tree = attr.getValue();
+                    ExprTree other_tree = other_classad.lookup(attr.getKey());
+                    if (other_tree == null) {
+                        is_same = false;
+                        break;
+                    } else if (!this_tree.sameAs(other_tree)) {
+                        is_same = false;
+                        break;
+                    }
+                }
+            }
+        }
+        return is_same;
+    }
+
+    public void clear() {
+        unchain();
+        attrList.clear();
+        if (alternateScope != null) {
+            alternateScope.clear();
+        }
+        if (parser != null) {
+            parser.reset();
+        }
+    }
+
+    public void unchain() {
+        if (chainedParentAd != null) {
+            chainedParentAd.clear();
+        }
+    }
+
+    public void getComponents(Map<CaseInsensitiveString, ExprTree> attrs) {
+        attrs.clear();
+        for (Entry<CaseInsensitiveString, ExprTree> attr : this.attrList.entrySet()) {
+            attrs.put(attr.getKey(), attr.getValue());
+        }
+    }
+
+    public ClassAd privateGetDeepScope(ExprTree tree) throws HyracksDataException {
+        ClassAd scope = new ClassAd();
+        Value val = new Value();
+        if (tree == null)
+            return (null);
+        tree.setParentScope(this);
+        if (!tree.publicEvaluate(val) || !val.isClassAdValue(scope)) {
+            return (null);
+        }
+        return (scope);
+    }
+
+    // --- begin integer attribute insertion ----
+    public boolean insertAttr(String name, int value, NumberFactor f) throws HyracksDataException {
+        ExprTree plit;
+        Value val = new Value();
+        val.setIntegerValue(value);
+        plit = Literal.createLiteral(val, f);
+        return insert(name, plit);
+    }
+
+    public boolean insertAttr(String name, int value) throws HyracksDataException {
+        return insertAttr(name, value, NumberFactor.NO_FACTOR);
+    }
+
+    public boolean insertAttr(String name, long value, NumberFactor f) throws HyracksDataException {
+        ExprTree plit;
+        Value val = new Value();
+
+        val.setIntegerValue(value);
+        plit = Literal.createLiteral(val, f);
+        return (insert(name, plit));
+    }
+
+    public boolean insertAttr(String name, long value) throws HyracksDataException {
+        return insertAttr(name, value, NumberFactor.NO_FACTOR);
+    }
+
+    public boolean deepInsertAttr(ExprTree scopeExpr, String name, int value, NumberFactor f)
+            throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        return (ad.insertAttr(name, value, f));
+    }
+
+    public boolean deepInsertAttr(ExprTree scopeExpr, String name, long value, NumberFactor f)
+            throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        return (ad.insertAttr(name, value, f));
+    }
+
+    // --- end integer attribute insertion ---
+
+    // --- begin real attribute insertion ---
+    public boolean insertAttr(String name, double value, NumberFactor f) throws HyracksDataException {
+        ExprTree plit;
+        Value val = new Value();
+        val.setRealValue(value);
+        plit = Literal.createLiteral(val, f);
+        return (insert(name, plit));
+    }
+
+    public boolean deepInsertAttr(ExprTree scopeExpr, String name, double value, NumberFactor f)
+            throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        return (ad.insertAttr(name, value, f));
+    }
+
+    // --- end real attribute insertion
+
+    // --- begin boolean attribute insertion
+    public boolean insertAttr(String name, boolean value) throws HyracksDataException {
+        ExprTree plit;
+        Value val = new Value();
+        val.setBooleanValue(value);
+        plit = Literal.createLiteral(val);
+        return (insert(name, plit));
+    }
+
+    public boolean deepInsertAttr(ExprTree scopeExpr, String name, boolean value) throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        return (ad.insertAttr(name, value));
+    }
+
+    // --- end boolean attribute insertion
+
+    // --- begin string attribute insertion
+    public boolean insertAttr(String name, AMutableCharArrayString value) throws HyracksDataException {
+        ExprTree plit;
+        Value val = new Value();
+        val.setStringValue(value);
+        plit = Literal.createLiteral(val);
+        return (insert(name, plit));
+    }
+
+    public boolean deepInsertAttr(ExprTree scopeExpr, String name, AMutableCharArrayString value)
+            throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        return (ad.insertAttr(name, value));
+    }
+
+    public boolean insertAttr(String name, String value) throws HyracksDataException {
+        ExprTree plit;
+        Value val = new Value();
+
+        val.setStringValue(value);
+        plit = Literal.createLiteral(val);
+        return (insert(name, plit));
+    }
+
+    public boolean deepInsertAttr(ExprTree scopeExpr, String name, String value) throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        return (ad.insertAttr(name, value));
+    }
+
+    // --- end string attribute insertion
+
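+    // Inserts an attribute from a serialized "name = value" pair, e.g.
+    // insert("Memory = 2048"). A quoted attribute name such as "'my attr' = 1"
+    // is handed to the parser as a one-attribute ad ("[<nvp>]") and merged
+    // into this ad.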
+    public boolean insert(String serialized_nvp) throws IOException {
+        boolean bRet = false;
+        String name, szValue;
+        int pos, npos, vpos;
+        int bpos = 0;
+
+        // comes in as "name = value" "name= value" or "name =value"
+        npos = pos = serialized_nvp.indexOf('=');
+
+        // only try to process if the string is valid
+        if (pos >= 0) {
+            while (npos > 0 && serialized_nvp.charAt(npos - 1) == ' ') {
+                npos--;
+            }
+            while (bpos < npos && serialized_nvp.charAt(bpos) == ' ') {
+                bpos++;
+            }
+            name = serialized_nvp.substring(bpos, npos);
+
+            vpos = pos + 1;
+            while (serialized_nvp.charAt(vpos) == ' ') {
+                vpos++;
+            }
+
+            szValue = serialized_nvp.substring(vpos);
+            if (name.charAt(0) == '\'') {
+                // We don't handle quoted attribute names for caching here.
+                // Hand the name-value-pair off to the parser as a one-attribute
+                // ad and merge the results into this ad.
+                newAd.clear();
+                name = "[" + serialized_nvp.toString() + "]";
+                if (parser.parseClassAd(name, newAd, true)) {
+                    return update(newAd);
+                } else {
+                    return false;
+                }
+            }
+
+            ExprTree newTree;
+            // we did not hit in the cache... parse the expression
+            newTree = parser.ParseExpression(szValue);
+            if (newTree != null) {
+                // if caching is enabled and we got here, then we know that the
+                // cache doesn't already have an entry for this name:value, so add
+                // it to the cache now.
+                if (newTree.getKind() != NodeKind.LITERAL_NODE) {
+                    Literal lit = parser.getLiteral();
+                    lit.getValue().setStringValue(szValue);
+                    bRet = insert(name, lit, false);
+                } else {
+                    bRet = insert(name, newTree, false);
+                }
+            }
+
+        } // end if pos >=0
+        return bRet;
+    }
+
+    public boolean insert(String attrName, ExprTree expr) throws HyracksDataException {
+        boolean result = insert(attrName, expr.isTreeHolder() ? ((ExprTreeHolder) expr).getInnerTree() : expr, false);
+        return result;
+    }
+
+    public boolean insert(String attrName, ExprTree pRef, boolean cache) throws HyracksDataException {
+        boolean bRet = false;
+        ExprTree tree = pRef;
+        // sanity checks
+        if (attrName.isEmpty() || pRef == null) {
+            throw new HyracksDataException();
+        }
+        CaseInsensitiveString pstrAttr = StringPool.get();
+        pstrAttr.set(attrName);
+
+        if (tree != null) {
+            // parent of the expression is this classad
+            tree.setParentScope(this);
+            attrList.put(pstrAttr, tree);
+            bRet = true;
+        }
+        return (bRet);
+    }
+
+    public boolean deepInsert(ExprTree scopeExpr, String name, ExprTree tree) throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        return (ad.insert(name, tree));
+    }
+
+    // --- end expression insertion
+
+    // --- begin lookup methods
+    public ExprTree lookup(String name) {
+        CaseInsensitiveString aString = StringPool.get();
+        aString.set(name);
+        ExprTree expr = lookup(aString);
+        StringPool.put(aString);
+        return expr;
+    }
+
+    public ExprTree lookup(CaseInsensitiveString name) {
+        /*System.out.println("Lookup Printing all attributes with their values:");
+        for (Entry<String, ExprTree> entry : attrList.entrySet()) {
+            System.out.println(entry.getKey() + ":" + entry.getValue().getKind());
+        }*/
+        ExprTree attr = attrList.get(name);
+        if (attr != null) {
+            return attr;
+        } else if (chainedParentAd != null) {
+            return chainedParentAd.lookup(name);
+        } else {
+            return null;
+        }
+    }
+
+    public ExprTree lookupInScope(AMutableCharArrayString name, ClassAd finalScope) {
+        EvalState state = new EvalState();
+        ExprTreeHolder tree = new ExprTreeHolder();
+        int rval;
+        state.setScopes(this);
+        rval = lookupInScope(name.toString(), tree, state);
+        if (rval == EvalResult.EVAL_OK.ordinal()) {
+            finalScope.setValue(state.getCurAd());
+            return (tree);
+        }
+        finalScope.setValue(null);
+        return null;
+    }
+
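+    // Resolves a name by searching this ad and then walking up its lexical parent
+    // scopes. The special names "toplevel", "root", "self" and "parent" are
+    // resolved against the evaluation state rather than the attribute map.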
+    public int lookupInScope(String name, ExprTreeHolder expr, EvalState state) {
+
+        ClassAd current = this;
+        ClassAd superScope = new ClassAd();
+        expr.setInnerTree(null);
+
+        while (expr.getInnerTree() == null && current != null) {
+            // lookups/eval's being done in the 'current' ad
+            state.getCurAd().setValue(current);
+
+            // lookup in current scope
+            expr.setInnerTree(current.lookup(name));
+            if ((expr.getInnerTree() != null)) {
+                return EvalResult.EVAL_OK.ordinal();
+            }
+
+            if (state.getRootAd().equals(current)) {
+                superScope = null;
+            } else {
+                superScope = current.parentScope;
+            }
+            if (!getSpecialAttrNames().contains(name)) {
+                // continue searching from the superScope ...
+                current = superScope;
+                if (current == this) { // NAC - simple loop checker
+                    return (EvalResult.EVAL_UNDEF.ordinal());
+                }
+            } else if (name.equalsIgnoreCase(ATTR_TOPLEVEL) || name.equalsIgnoreCase(ATTR_ROOT)) {
+                // if the "toplevel" attribute was requested ...
+                expr.setInnerTree(state.getRootAd());
+                if (expr.getInnerTree() == null) { // NAC - circularity so no root
+                    return EvalResult.EVAL_FAIL.ordinal(); // NAC
+                } // NAC
+                return (expr.getInnerTree() != null ? EvalResult.EVAL_OK.ordinal() : EvalResult.EVAL_UNDEF.ordinal());
+            } else if (name.equalsIgnoreCase(ATTR_SELF) || name.equalsIgnoreCase(ATTR_MY)) {
+                // if the "self" ad was requested
+                expr.setInnerTree(state.getCurAd());
+                return (expr.getInnerTree() != null ? EvalResult.EVAL_OK.ordinal() : EvalResult.EVAL_UNDEF.ordinal());
+            } else if (name.equalsIgnoreCase(ATTR_PARENT)) {
+                // the lexical parent
+                expr.setInnerTree(superScope);
+                return (expr.getInnerTree() != null ? EvalResult.EVAL_OK.ordinal() : EvalResult.EVAL_UNDEF.ordinal());
+            } else if (name.equalsIgnoreCase(ATTR_CURRENT_TIME)) {
+                // an alias for time() from old ClassAds
+                expr.setInnerTree(getCurrentTimeExpr());
+                return (expr.getInnerTree() != null ? EvalResult.EVAL_OK.ordinal() : EvalResult.EVAL_UNDEF.ordinal());
+            }
+        }
+        return (EvalResult.EVAL_UNDEF.ordinal());
+    }
+
+    // --- end lookup methods
+
+    // --- begin deletion methods
+    public boolean delete(String name) throws HyracksDataException {
+        CaseInsensitiveString aString = StringPool.get();
+        aString.set(name);
+        boolean success = delete(aString);
+        StringPool.put(aString);
+        return success;
+    }
+
+    public boolean delete(CaseInsensitiveString name) throws HyracksDataException {
+        boolean deleted_attribute;
+        deleted_attribute = false;
+        if (attrList.containsKey(name)) {
+            attrList.remove(name);
+            deleted_attribute = true;
+        }
+        // If the attribute is in the chained parent, we "delete" it by defining
+        // it here as undefined, whether or not it was defined here.  This is
+        // behavior copied from old ClassAds. It's also one reason you
+        // probably don't want to use this feature in the future.
+        if (chainedParentAd != null && chainedParentAd.lookup(name) != null) {
+            Value undefined_value = new Value();
+            undefined_value.setUndefinedValue();
+            deleted_attribute = true;
+            ExprTree plit = Literal.createLiteral(undefined_value);
+            insert(name.get(), plit);
+        }
+        return deleted_attribute;
+    }
+
+    public boolean deepDelete(ExprTree scopeExpr, String name) throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (false);
+        CaseInsensitiveString aString = StringPool.get();
+        aString.set(name);
+        boolean success = ad.delete(aString);
+        StringPool.put(aString);
+        return success;
+    }
+
+    // --- end deletion methods
+
+    // --- begin removal methods
+    public ExprTree remove(String name) throws HyracksDataException {
+        ExprTree tree = null;
+        if (attrList.containsKey(name)) {
+            tree = attrList.remove(name);
+        }
+
+        // If the attribute is in the chained parent, we "delete" it by defining
+        // it here as undefined, whether or not it was defined here.  This is
+        // behavior copied from old ClassAds. It's also one reason you
+        // probably don't want to use this feature in the future.
+        if (chainedParentAd != null && chainedParentAd.lookup(name) != null) {
+            if (tree == null) {
+                tree = chainedParentAd.lookup(name);
+            }
+            Value undefined_value = new Value();
+            undefined_value.setUndefinedValue();
+            ExprTree plit = Literal.createLiteral(undefined_value);
+            // insert an undefined literal so that the chained parent's value is shadowed
+            insert(name, plit);
+        }
+        return tree;
+    }
+
+    public ExprTree deepRemove(ExprTree scopeExpr, String name) throws HyracksDataException {
+        ClassAd ad = privateGetDeepScope(scopeExpr);
+        if (ad == null)
+            return (null);
+        return (ad.remove(name));
+    }
+
+    // --- end removal methods
+
+    @Override
+    public void privateSetParentScope(ClassAd ad) {
+        // already set by base class for this node; we shouldn't propagate
+        // the call to sub-expressions because this is a new scope
+    }
+
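+    // Applies a "modify" ad to this ad (or to the scope named by its ATTR_CONTEXT
+    // attribute): ATTR_REPLACE clears and replaces the target, ATTR_UPDATES merges
+    // attributes in, and ATTR_DELETES removes the listed attribute names.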
+    public void modify(ClassAd mod) throws HyracksDataException {
+        ClassAd ctx;
+        ExprTree expr;
+        Value val = new Value();
+
+        // Step 0:  Determine Context
+        if ((expr = mod.lookup(Common.ATTR_CONTEXT)) != null) {
+            if ((ctx = privateGetDeepScope(expr)) == null) {
+                return;
+            }
+        } else {
+            ctx = this;
+        }
+
+        // Step 1:  Process Replace attribute
+        if ((expr = mod.lookup(Common.ATTR_REPLACE)) != null) {
+            ClassAd ad = new ClassAd();
+            if (expr.publicEvaluate(val) && val.isClassAdValue(ad)) {
+                ctx.clear();
+                ctx.update(ad);
+            }
+        }
+
+        // Step 2:  Process Updates attribute
+        if ((expr = mod.lookup(Common.ATTR_UPDATES)) != null) {
+            ClassAd ad = new ClassAd();
+            if (expr.publicEvaluate(val) && val.isClassAdValue(ad)) {
+                ctx.update(ad);
+            }
+        }
+
+        // Step 3:  Process Deletes attribute
+        if ((expr = mod.lookup(Common.ATTR_DELETES)) != null) {
+            ExprList list = new ExprList();
+            AMutableCharArrayString attrName = new AMutableCharArrayString();
+
+            // make a first pass to check that it is a list of strings ...
+            if (!expr.publicEvaluate(val) || !val.isListValue(list)) {
+                return;
+            }
+            for (ExprTree aExpr : list.getExprList()) {
+                if (!aExpr.publicEvaluate(val) || !val.isStringValue(attrName)) {
+                    return;
+                }
+            }
+            // now go through and delete all the named attributes ...
+            for (ExprTree aExpr : list.getExprList()) {
+                if (aExpr.publicEvaluate(val) && val.isStringValue(attrName)) {
+                    ctx.delete(attrName.toString());
+                }
+            }
+        }
+    }
+
+    @Override
+    public ExprTree copy() throws HyracksDataException {
+        ClassAd newAd = new ClassAd();
+        newAd.parentScope = parentScope;
+        newAd.chainedParentAd = chainedParentAd;
+
+        for (Entry<CaseInsensitiveString, ExprTree> entry : attrList.entrySet()) {
+            newAd.insert(entry.getKey().get(), entry.getValue().copy(), false);
+        }
+        return newAd;
+    }
+
+    @Override
+    public boolean publicEvaluate(EvalState state, Value val) {
+        val.setClassAdValue(this);
+        return (true);
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val, ExprTreeHolder tree) throws HyracksDataException {
+        val.setClassAdValue(this);
+        tree.setInnerTree(copy());
+        return true;
+    }
+
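+    // Flattening: each attribute is flattened against the evaluation state; when
+    // an expression reduces to a value it is replaced by a literal, and the
+    // results are collected into a new ClassAd.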
+    @Override
+    public boolean privateFlatten(EvalState state, Value val, ExprTreeHolder tree, AMutableInt32 i)
+            throws HyracksDataException {
+        ClassAd newAd = new ClassAd();
+        Value eval = new Value();
+        ExprTreeHolder etree = new ExprTreeHolder();
+        ClassAd oldAd;
+
+        tree.setInnerTree(null); // Just to be safe...  wenger 2003-12-11.
+
+        oldAd = state.getCurAd();
+        state.setCurAd(this);
+
+        for (Entry<CaseInsensitiveString, ExprTree> entry : attrList.entrySet()) {
+            // flatten expression
+            if (!entry.getValue().publicFlatten(state, eval, etree)) {
+                tree.setInnerTree(null);
+                eval.clear();
+                state.setCurAd(oldAd);
+                return false;
+            }
+
+            // if a value was obtained, convert it to a literal
+            if (etree.getInnerTree() == null) {
+                etree.setInnerTree(Literal.createLiteral(eval));
+                if (etree.getInnerTree() == null) {
+                    tree.setInnerTree(null);
+                    eval.clear();
+                    state.setCurAd(oldAd);
+                    return false;
+                }
+            }
+            newAd.attrList.put(entry.getKey(), etree);
+            eval.clear();
+        }
+
+        tree.setInnerTree(newAd);
+        state.setCurAd(oldAd);
+        return true;
+    }
+
+    public boolean evaluateAttr(String attr, Value val) throws HyracksDataException {
+        EvalState state = new EvalState();
+        ExprTreeHolder tree = new ExprTreeHolder();
+        state.setScopes(this);
+        switch (lookupInScope(attr, tree, state)) {
+            case ExprTree.EVAL_FAIL_Int:
+                return false;
+            case ExprTree.EVAL_OK_Int:
+                return (tree.publicEvaluate(state, val));
+            case ExprTree.EVAL_UNDEF_Int:
+                val.setUndefinedValue();
+                return (true);
+            case ExprTree.EVAL_ERROR_Int:
+                val.setErrorValue();
+                return (true);
+            default:
+                return false;
+        }
+    }
+
+    public boolean evaluateExpr(String buf, Value result) throws HyracksDataException {
+        boolean successfully_evaluated;
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ClassAdParser parser = new ClassAdParser();
+
+        try {
+            if (parser.parseExpression(buf, tree)) {
+                successfully_evaluated = evaluateExpr(tree, result);
+            } else {
+                successfully_evaluated = false;
+            }
+        } catch (IOException e) {
+            throw new HyracksDataException(e);
+        }
+        return successfully_evaluated;
+    }
+
+    public boolean evaluateExpr(ExprTreeHolder tree, Value val) throws HyracksDataException {
+        EvalState state = new EvalState();
+        state.setScopes(this);
+        return (tree.publicEvaluate(state, val));
+    }
+
+    public boolean evaluateExpr(ExprTreeHolder tree, Value val, ExprTreeHolder sig) throws HyracksDataException {
+        EvalState state = new EvalState();
+        state.setScopes(this);
+        return (tree.publicEvaluate(state, val, sig));
+    }
+
+    public boolean evaluateAttrInt(String attr, AMutableInt64 i) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isIntegerValue(i));
+    }
+
+    public boolean evaluateAttrReal(String attr, AMutableDouble r) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isRealValue(r));
+    }
+
+    public boolean evaluateAttrNumber(String attr, AMutableInt64 i) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isNumber(i));
+    }
+
+    public boolean evaluateAttrNumber(String attr, AMutableDouble r) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isNumber(r));
+    }
+
+    public boolean evaluateAttrString(String attr, AMutableCharArrayString buf, int len) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isStringValue(buf, len));
+    }
+
+    public boolean evaluateAttrString(String attr, AMutableCharArrayString buf) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isStringValue(buf));
+    }
+
+    public boolean evaluateAttrBool(String attr, MutableBoolean b) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isBooleanValue(b));
+    }
+
+    public boolean evaluateAttrBoolEquiv(String attr, MutableBoolean b) throws HyracksDataException {
+        Value val = new Value();
+        return (evaluateAttr(attr, val) && val.isBooleanValueEquiv(b));
+    }
+
+    /* Reference is an ordered set of Strings (ordered by case-insensitive comparison). Example:
+     *   TreeSet<String> references = new TreeSet<String>(
+     *           new Comparator<String>() {
+     *               public int compare(String o1, String o2) {
+     *                   return o1.compareToIgnoreCase(o2);
+     *               }
+     *           });
+     *
+     * PortReferences is a Map<ClassAd, OrderedSet<String>>
+     */
+
+    public boolean getExternalReferences(ExprTree tree, TreeSet<String> refs, boolean fullNames)
+            throws HyracksDataException {
+        EvalState state = new EvalState();
+        // Treat this ad as the root of the tree for reference tracking.
+        // If an attribute is only present in a parent scope of this ad,
+        // then we want to treat it as an external reference.
+        state.setRootAd(this);
+        state.setCurAd(this);
+        return (privateGetExternalReferences(tree, this, state, refs, fullNames));
+    }
+
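+    // Recursive worker for getExternalReferences: attribute references that cannot
+    // be resolved in the scope chain (EVAL_UNDEF) are recorded as external, while
+    // resolved references are followed so that their own external references are
+    // collected as well.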
+    public boolean privateGetExternalReferences(ExprTree expr, ClassAd ad, EvalState state, TreeSet<String> refs,
+            boolean fullNames) throws HyracksDataException {
+        if (expr.isTreeHolder()) {
+            expr = ((ExprTreeHolder) expr).getInnerTree();
+        }
+        switch (expr.getKind()) {
+            case LITERAL_NODE:
+                // no external references here
+                return (true);
+
+            case ATTRREF_NODE: {
+                ClassAd start = new ClassAd();
+                ExprTreeHolder tree = new ExprTreeHolder();
+                ExprTreeHolder result = new ExprTreeHolder();
+                AMutableCharArrayString attr = new AMutableCharArrayString();
+                Value val = new Value();
+                MutableBoolean abs = new MutableBoolean();
+
+                ((AttributeReference) expr).getComponents(tree, attr, abs);
+                // establish starting point for attribute search
+                if (tree.getInnerTree() == null) {
+                    start = abs.booleanValue() ? state.getRootAd() : state.getCurAd();
+                    if (abs.booleanValue() && (start == null)) {// NAC - circularity so no root
+                        return false; // NAC
+                    } // NAC
+                } else {
+                    if (!tree.publicEvaluate(state, val)) {
+                        return (false);
+                    }
+                    // if the tree evals to undefined, the external references
+                    // are in the tree part
+                    if (val.isUndefinedValue()) {
+                        if (fullNames) {
+                            AMutableCharArrayString fullName = new AMutableCharArrayString();
+                            if (tree.getInnerTree() != null) {
+                                ClassAdUnParser unparser = new PrettyPrint();
+                                unparser.unparse(fullName, tree);
+                                fullName.appendChar('.');
+                            }
+                            fullName.appendString(attr);
+                            refs.add(fullName.toString());
+                            return true;
+                        } else {
+                            if (state.getDepthRemaining() <= 0) {
+                                return false;
+                            }
+                            state.decrementDepth();
+                            boolean ret = privateGetExternalReferences(tree, ad, state, refs, fullNames);
+                            state.incrementDepth();
+                            return ret;
+                        }
+                    }
+                    // otherwise, if the tree didn't evaluate to a classad,
+                    // we have a problem
+                    if (!val.isClassAdValue(start)) {
+                        return (false);
+                    }
+                }
+                // lookup for attribute
+                ClassAd curAd = state.getCurAd();
+                switch (start.lookupInScope(attr.toString(), result, state)) {
+                    case EVAL_ERROR_Int:
+                        // some error
+                        return (false);
+                    case EVAL_UNDEF_Int:
+                        // attr is external
+                        refs.add(attr.toString());
+                        state.setCurAd(curAd);
+                        return (true);
+                    case EVAL_OK_Int: {
+                        // attr is internal; find external refs in result
+                        if (state.getDepthRemaining() <= 0) {
+                            state.setCurAd(curAd);
+                            return false;
+                        }
+                        state.decrementDepth();
+                        boolean rval = privateGetExternalReferences(result, ad, state, refs, fullNames);
+                        state.incrementDepth();
+                        state.setCurAd(curAd);
+                        return (rval);
+                    }
+
+                    case EVAL_FAIL_Int:
+                    default:
+                        // unexpected lookup result
+                        return (false);
+                }
+            }
+            case OP_NODE: {
+                // recurse on subtrees
+                AMutableInt32 opKind = new AMutableInt32(0);
+                ExprTreeHolder t1 = new ExprTreeHolder();
+                ExprTreeHolder t2 = new ExprTreeHolder();
+                ExprTreeHolder t3 = new ExprTreeHolder();
+
+                ((Operation) expr).getComponents(opKind, t1, t2, t3);
+                if (t1.getInnerTree() != null && !privateGetExternalReferences(t1, ad, state, refs, fullNames)) {
+                    return (false);
+                }
+                if (t2.getInnerTree() != null && !privateGetExternalReferences(t2, ad, state, refs, fullNames)) {
+                    return (false);
+                }
+                if (t3.getInnerTree() != null && !privateGetExternalReferences(t3, ad, state, refs, fullNames)) {
+                    return (false);
+                }
+                return (true);
+            }
+            case FN_CALL_NODE: {
+                // recurse on subtrees
+                AMutableCharArrayString fnName = new AMutableCharArrayString();
+                ExprList args = new ExprList();
+                ((FunctionCall) expr).getComponents(fnName, args);
+                for (ExprTree tree : args.getExprList()) {
+                    if (!privateGetExternalReferences(tree, ad, state, refs, fullNames)) {
+                        return (false);
+                    }
+                }
+                return (true);
+            }
+            case CLASSAD_NODE: {
+                // recurse on subtrees
+                Map<CaseInsensitiveString, ExprTree> attrs = new HashMap<CaseInsensitiveString, ExprTree>();
+                ((ClassAd) expr).getComponents(attrs);
+                for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
+                    if (state.getDepthRemaining() <= 0) {
+                        return false;
+                    }
+                    state.decrementDepth();
+                    boolean ret = privateGetExternalReferences(entry.getValue(), ad, state, refs, fullNames);
+                    state.incrementDepth();
+                    if (!ret) {
+                        return (false);
+                    }
+                }
+                return (true);
+            }
+            case EXPR_LIST_NODE: {
+                // recurse on subtrees
+                ExprList exprs = new ExprList();
+
+                ((ExprList) expr).getComponents(exprs);
+                for (ExprTree exprTree : exprs.getExprList()) {
+                    if (state.getDepthRemaining() <= 0) {
+                        return false;
+                    }
+                    state.decrementDepth();
+
+                    boolean ret = privateGetExternalReferences(exprTree, ad, state, refs, fullNames);
+
+                    state.incrementDepth();
+                    if (!ret) {
+                        return (false);
+                    }
+                }
+                return (true);
+            }
+            default:
+                return false;
+        }
+    }
+
+    // PortReferences is a Map<ClassAd,TreeSet<Strings>>
+    public boolean getExternalReferences(ExprTree tree, Map<ClassAd, TreeSet<String>> refs)
+            throws HyracksDataException {
+        EvalState state = new EvalState();
+        // Treat this ad as the root of the tree for reference tracking.
+        // If an attribute is only present in a parent scope of this ad,
+        // then we want to treat it as an external reference.
+        state.setRootAd(this);
+        state.setCurAd(this);
+
+        return (privateGetExternalReferences(tree, this, state, refs));
+    }
+
+    public boolean privateGetExternalReferences(ExprTree expr, ClassAd ad, EvalState state,
+            Map<ClassAd, TreeSet<String>> refs) throws HyracksDataException {
+        switch (expr.getKind()) {
+            case LITERAL_NODE:
+                // no external references here
+                return (true);
+
+            case ATTRREF_NODE: {
+                ClassAd start = new ClassAd();
+                ExprTreeHolder tree = new ExprTreeHolder();
+                ExprTreeHolder result = new ExprTreeHolder();
+                AMutableCharArrayString attr = new AMutableCharArrayString();
+                Value val = new Value();
+                MutableBoolean abs = new MutableBoolean();
+
+                ((AttributeReference) expr).getComponents(tree, attr, abs);
+                // establish starting point for attribute search
+                if (tree.getInnerTree() == null) {
+                    start = abs.booleanValue() ? state.getRootAd() : state.getCurAd();
+                    if (abs.booleanValue() && (start == null)) {// NAC - circularity so no root
+                        return false; // NAC
+                    } // NAC
+                } else {
+                    if (!tree.publicEvaluate(state, val))
+                        return (false);
+                    // if the tree evals to undefined, the external references
+                    // are in the tree part
+                    if (val.isUndefinedValue()) {
+                        return (privateGetExternalReferences(tree, ad, state, refs));
+                    }
+                    // otherwise, if the tree didn't evaluate to a classad,
+                    // we have a problem
+                    if (!val.isClassAdValue(start))
+                        return (false);
+
+                    // make sure that we are starting from a "valid" scope
+                    if (!refs.containsKey(start) && start != this) {
+                        return (false);
+                    }
+                }
+                // lookup for attribute
+                ClassAd curAd = state.getCurAd();
+                TreeSet<String> pitr = refs.get(start);
+                if (pitr == null) {
+                    pitr = new TreeSet<String>();
+                    refs.put(start, pitr);
+                }
+                switch (start.lookupInScope(attr.toString(), result, state)) {
+                    case EVAL_ERROR_Int:
+                        // some error
+                        return (false);
+
+                    case EVAL_UNDEF_Int:
+                        // attr is external
+                        pitr.add(attr.toString());
+                        state.setCurAd(curAd);
+                        return (true);
+                    case EVAL_OK_Int: {
+                        // attr is internal; find external refs in result
+                        boolean rval = privateGetExternalReferences(result, ad, state, refs);
+                        state.setCurAd(curAd);
+                        return (rval);
+                    }
+
+                    case EVAL_FAIL_Int:
+                    default:
+                        // unexpected lookup result
+                        return (false);
+                }
+            }
+
+            case OP_NODE: {
+                // recurse on subtrees
+                AMutableInt32 opKind = new AMutableInt32(0);
+                ExprTreeHolder t1 = new ExprTreeHolder();
+                ExprTreeHolder t2 = new ExprTreeHolder();
+                ExprTreeHolder t3 = new ExprTreeHolder();
+                ((Operation) expr).getComponents(opKind, t1, t2, t3);
+                if (t1.getInnerTree() != null && !privateGetExternalReferences(t1, ad, state, refs)) {
+                    return (false);
+                }
+                if (t2.getInnerTree() != null && !privateGetExternalReferences(t2, ad, state, refs)) {
+                    return (false);
+                }
+                if (t3.getInnerTree() != null && !privateGetExternalReferences(t3, ad, state, refs)) {
+                    return (false);
+                }
+                return (true);
+            }
+
+            case FN_CALL_NODE: {
+                // recurse on subtrees
+                AMutableCharArrayString fnName = new AMutableCharArrayString();
+                ExprList args = new ExprList();
+
+                ((FunctionCall) expr).getComponents(fnName, args);
+                for (ExprTree exprTree : args.getExprList()) {
+                    if (!privateGetExternalReferences(exprTree, ad, state, refs)) {
+                        return (false);
+                    }
+                }
+                return (true);
+            }
+
+            case CLASSAD_NODE: {
+                // recurse on subtrees
+                HashMap<CaseInsensitiveString, ExprTree> attrs = new HashMap<CaseInsensitiveString, ExprTree>();
+
+                ((ClassAd) expr).getComponents(attrs);
+                for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
+                    if (!privateGetExternalReferences(entry.getValue(), ad, state, refs)) {
+                        return (false);
+                    }
+                }
+                return (true);
+            }
+
+            case EXPR_LIST_NODE: {
+                // recurse on subtrees
+                ExprList exprs = new ExprList();
+                ((ExprList) expr).getComponents(exprs);
+                for (ExprTree exprTree : exprs.getExprList()) {
+                    if (!privateGetExternalReferences(exprTree, ad, state, refs)) {
+                        return (false);
+                    }
+                }
+                return (true);
+            }
+
+            default:
+                return false;
+        }
+    }
+
+    /* Reference is an ordered set of Strings (ordered by case-insensitive comparison). Example:
+     *   TreeSet<String> references = new TreeSet<String>(
+     *           new Comparator<String>() {
+     *               public int compare(String o1, String o2) {
+     *                   return o1.compareToIgnoreCase(o2);
+     *               }
+     *           });
+     *
+     * PortReferences is a Map<ClassAd, OrderedSet<String>>
+     */
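+    // Internal references are the counterpart of external ones: attribute names
+    // that resolve within this ad (the root of the evaluation) are recorded, and
+    // their defining expressions are searched recursively.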
+    public boolean getInternalReferences(ExprTree tree, TreeSet<String> refs, boolean fullNames)
+            throws HyracksDataException {
+        EvalState state = new EvalState();
+
+        // Treat this ad as the root of the tree for reference tracking.
+        // If an attribute is only present in a parent scope of this ad,
+        // then we want to treat it as an external reference.
+        state.setRootAd(this);
+        state.setCurAd(this);
+
+        return (privateGetInternalReferences(tree, this, state, refs, fullNames));
+    }
+
+    // This is closely modeled on _GetExternalReferences in new_classads.
+    public boolean privateGetInternalReferences(ExprTree expr, ClassAd ad, EvalState state, TreeSet<String> refs,
+            boolean fullNames) throws HyracksDataException {
+
+        switch (expr.getKind()) {
+            //nothing to be found here!
+            case LITERAL_NODE: {
+                return true;
+            }
+
+            case ATTRREF_NODE: {
+                ClassAd start = new ClassAd();
+                ExprTreeHolder tree = new ExprTreeHolder();
+                ExprTreeHolder result = new ExprTreeHolder();
+                AMutableCharArrayString attr = new AMutableCharArrayString();
+                Value val = new Value();
+                MutableBoolean abs = new MutableBoolean();
+
+                ((AttributeReference) expr).getComponents(tree, attr, abs);
+
+                //figuring out which state to base this off of
+                if (tree.getInnerTree() == null) {
+                    start = abs.booleanValue() ? state.getRootAd() : state.getCurAd();
+                    //remove circularity
+                    if (abs.booleanValue() && (start == null)) {
+                        return false;
+                    }
+                } else {
+                    boolean orig_inAttrRefScope = state.isInAttrRefScope();
+                    state.setInAttrRefScope(true);
+                    boolean rv = privateGetInternalReferences(tree, ad, state, refs, fullNames);
+                    state.setInAttrRefScope(orig_inAttrRefScope);
+                    if (!rv) {
+                        return false;
+                    }
+
+                    if (!tree.publicEvaluate(state, val)) {
+                        return false;
+                    }
+
+                    // TODO Do we need extra handling for list values?
+                    //   Should types other than undefined, error, or list
+                    //   cause a failure?
+                    if (val.isUndefinedValue()) {
+                        return true;
+                    }
+
+                    // otherwise, if the tree didn't evaluate to a classad,
+                    // we have a problem.
+                    // TODO: but why?
+                    if (!val.isClassAdValue(start)) {
+                        return false;
+                    }
+                }
+
+                ClassAd curAd = state.getCurAd();
+                switch (start.lookupInScope(attr.toString(), result, state)) {
+                    case EVAL_ERROR_Int:
+                        return false;
+                    // attr is external; there are no internal references to
+                    // record for an unresolved attribute
+                    case EVAL_UNDEF_Int: {
+
+                        //boolean rval = _GetInternalReferences(result, ad, state, refs, fullNames);
+                        //state.getCurAd() = curAd;
+                        return true;
+                    }
+
+                    case EVAL_OK_Int: {
+                        // the attribute is internal.
+                        // Check whether the attribute was found in the root
+                        // ad for this evaluation and that the attribute isn't
+                        // one of our special ones (self, parent, my, etc.).
+                        // If the ad actually has an attribute with the same
+                        // name as one of our special attributes, then count
+                        // that as an internal reference.
+                        // TODO LookupInScope() knows whether it's returning
+                        //   the expression of one of the special attributes
+                        //   or that of an attribute that actually appears in
+                        //   the ad. If it told us which one, then we could
+                        //   avoid the Lookup() call below.
+                        if (state.getCurAd() == state.getRootAd() && state.getCurAd().lookup(attr.toString()) != null) {
+                            refs.add(attr.toString());
+                        }
+                        if (state.getDepthRemaining() <= 0) {
+                            state.setCurAd(curAd);
+                            return false;
+                        }
+                        state.decrementDepth();
+
+                        boolean rval = privateGetInternalReferences(result, ad, state, refs, fullNames);
+
+                        state.incrementDepth();
+                        //TODO: Does this actually matter?
+                        state.setCurAd(curAd);
+                        return rval;
+                    }
+
+                    case EVAL_FAIL_Int:
+                    default:
+                        // unexpected lookup result
+                        return false;
+                }
+            }
+
+            case OP_NODE: {
+
+                //recurse on subtrees
+                AMutableInt32 op = new AMutableInt32(0);
+                ExprTreeHolder t1 = new ExprTreeHolder();
+                ExprTreeHolder t2 = new ExprTreeHolder();
+                ExprTreeHolder t3 = new ExprTreeHolder();
+                ((Operation) expr).getComponents(op, t1, t2, t3);
+                if (t1.getInnerTree() != null && !privateGetInternalReferences(t1, ad, state, refs, fullNames)) {
+                    return false;
+                }
+
+                if (t2.getInnerTree() != null && !privateGetInternalReferences(t2, ad, state, refs, fullNames)) {
+                    return false;
+                }
+
+                if (t3.getInnerTree() != null && !privateGetInternalReferences(t3, ad, state, refs, fullNames)) {
+                    return false;
+                }
+                return true;
+            }
+
+            case FN_CALL_NODE: {
+                //recurse on the subtrees!
+                AMutableCharArrayString fnName = new AMutableCharArrayString();
+                ExprList args = new ExprList();
+
+                ((FunctionCall) expr).getComponents(fnName, args);
+                for (ExprTree exprTree : args.getExprList()) {
+                    if (!privateGetInternalReferences(exprTree, ad, state, refs, fullNames)) {
+                        return false;
+                    }
+                }
+
+                return true;
+            }
+
+            case CLASSAD_NODE: {
+                //also recurse on subtrees...
+                HashMap<CaseInsensitiveString, ExprTree> attrs = new HashMap<CaseInsensitiveString, ExprTree>();
+
+                // If this ClassAd is only being used here as the scoping
+                // for an attribute reference, don't recurse into all of
+                // its attributes.
+                if (state.isInAttrRefScope()) {
+                    return true;
+                }
+
+                ((ClassAd) expr).getComponents(attrs);
+                for (Entry<CaseInsensitiveString, ExprTree> entry : attrs.entrySet()) {
+                    if (state.getDepthRemaining() <= 0) {
+                        return false;
+                    }
+                    state.decrementDepth();
+
+                    boolean ret = privateGetInternalReferences(entry.getValue(), ad, state, refs, fullNames);
+
+                    state.incrementDepth();
+                    if (!ret) {
+                        return false;
+                    }
+                }
+
+                return true;
+            }
+
+            case EXPR_LIST_NODE: {
+                ExprList exprs = new ExprList();
+
+                ((ExprList) expr).getComponents(exprs);
+                for (ExprTree exprTree : exprs.getExprList()) {
+                    if (state.getDepthRemaining() <= 0) {
+                        return false;
+                    }
+                    state.decrementDepth();
+
+                    boolean ret = privateGetInternalReferences(exprTree, ad, state, refs, fullNames);
+
+                    state.incrementDepth();
+                    if (!ret) {
+                        return false;
+                    }
+                }
+
+                return true;
+            }
+
+            default:
+                return false;
+
+        }
+    }
+
+    public boolean publicFlatten(ExprTree tree, Value val, ExprTreeHolder fexpr) throws HyracksDataException {
+        EvalState state = new EvalState();
+
+        state.setScopes(this);
+        return (tree.publicFlatten(state, val, fexpr));
+    }
+
+    public boolean flattenAndInline(ExprTree tree, Value val, ExprTreeHolder fexpr) throws HyracksDataException {
+        EvalState state = new EvalState();
+
+        state.setScopes(this);
+        state.setFlattenAndInline(true);
+        return (tree.publicFlatten(state, val, fexpr));
+    }
+
+    public void chainToAd(ClassAd new_chain_parent_ad) {
+        if (new_chain_parent_ad != null) {
+            chainedParentAd = new_chain_parent_ad;
+        }
+    }
+
+    public int pruneChildAd() {
+        int iRet = 0;
+
+        if (chainedParentAd != null) {
+            // loop through cleaning all expressions which are the same.
+            Iterator<Entry<CaseInsensitiveString, ExprTree>> it = attrList.entrySet().iterator();
+            while (it.hasNext()) {
+                Entry<CaseInsensitiveString, ExprTree> entry = it.next();
+                ExprTree tree = chainedParentAd.lookup(entry.getKey());
+
+                if (tree != null && tree.sameAs(entry.getValue())) {
+                    // 1st remove from dirty list
+                    it.remove();
+                    iRet++;
+                }
+            }
+        }
+
+        return iRet;
+    }
+
+    public ClassAd getChainedParentAd() {
+        return chainedParentAd;
+    }
+
+    public void setValue(ClassAd value) {
+        this.attrList = value.attrList;
+        this.alternateScope = value.alternateScope;
+        this.chainedParentAd = value.chainedParentAd;
+        this.parentScope = value.parentScope;
+        this.size = value.size;
+    }
+
+    @Override
+    public int size() {
+        return attrList.size();
+    }
+
+    public static void valStr(AMutableCharArrayString szUnparsedValue, ExprTree pTree) {
+        szUnparsedValue.appendString(pTree.toString());
+    }
+
+    public static void valStr(AMutableCharArrayString szOut, boolean tValue) {
+        szOut.appendString(tValue ? "true" : "false");
+    }
+
+    @Override
+    public NodeKind getKind() {
+        return NodeKind.CLASSAD_NODE;
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val) throws HyracksDataException {
+        val.setClassAdValue(this);
+        return (true);
+    }
+
+    public void insertAttr(String name, double value) throws HyracksDataException {
+        insertAttr(name, value, NumberFactor.NO_FACTOR);
+    }
+
+    public void createParser() {
+        parser = new ClassAdParser();
+    }
+}
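
The pruneChildAd() method above drops from a chained child ad every attribute whose expression is identical to the one found in its chained parent, returning the number of attributes removed. A minimal standalone sketch of that idea, written against plain java.util maps rather than the ClassAd API (class and value types here are illustrative only), might look like this:

    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;

    // Standalone illustration of the pruning idea: remove child entries that
    // duplicate the chained parent's value for the same key.
    public class PruneSketch {
        static int prune(Map<String, String> child, Map<String, String> parent) {
            int removed = 0;
            Iterator<Map.Entry<String, String>> it = child.entrySet().iterator();
            while (it.hasNext()) {
                Map.Entry<String, String> e = it.next();
                String parentValue = parent.get(e.getKey());
                if (parentValue != null && parentValue.equals(e.getValue())) {
                    it.remove();
                    removed++;
                }
            }
            return removed;
        }

        public static void main(String[] args) {
            Map<String, String> parent = new HashMap<>();
            parent.put("Owner", "xguo23");
            Map<String, String> child = new HashMap<>();
            child.put("Owner", "xguo23");   // same as parent, pruned
            child.put("JobStatus", "4");    // child-only, kept
            System.out.println(prune(child, parent) + " pruned; child now " + child);
        }
    }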

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdFunc.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdFunc.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdFunc.java
new file mode 100644
index 0000000..4e77bc0
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdFunc.java
@@ -0,0 +1,25 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public interface ClassAdFunc {
+    public boolean call(String name, ExprList argList, EvalState state, Value val) throws HyracksDataException;
+}
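
ClassAdFunc is the single-method hook a ClassAd function implementation provides: given the function name, its argument list, and the current evaluation state, it computes a result into val. A hypothetical no-op implementation, shown only to illustrate the shape of the contract (the registration mechanism and the exact meaning of the return value are not shown in this file and are assumptions), could be:

    package org.apache.asterix.external.classad;

    import org.apache.hyracks.api.exceptions.HyracksDataException;

    // Hypothetical example (not part of this commit): a function that ignores
    // its arguments and leaves the result value untouched.
    public class NoOpClassAdFunc implements ClassAdFunc {
        @Override
        public boolean call(String name, ExprList argList, EvalState state, Value val)
                throws HyracksDataException {
            // A real function would inspect argList, evaluate each argument against
            // state, and store the computed result in val. Returning true is assumed
            // here to signal that the call was handled.
            return true;
        }
    }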

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdTime.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdTime.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdTime.java
new file mode 100644
index 0000000..66c5f56
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/ClassAdTime.java
@@ -0,0 +1,278 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import java.util.Calendar;
+import java.util.TimeZone;
+
+/*
+ * If not absolute, we only care about the milliseconds value
+ * If absolute, we care about the milliseconds and the calendar
+ */
+public class ClassAdTime {
+
+    private Calendar timeZoneCalendar;
+    private boolean isAbsolute;
+
+    public ClassAdTime(ClassAdTime t) {
+        this.isAbsolute = t.isAbsolute;
+        this.timeZoneCalendar = Calendar.getInstance(t.timeZoneCalendar.getTimeZone());
+        this.timeZoneCalendar.setTimeInMillis(t.timeZoneCalendar.getTimeInMillis());
+    }
+
+    public TimeZone getTimeZone() {
+        return timeZoneCalendar.getTimeZone();
+    }
+
+    public long getRelativeTime() {
+        return timeZoneCalendar.getTimeInMillis();
+    }
+
+    // setTimeZone (parameter is in seconds)
+    public void setTimeZone(int offset) {
+        timeZoneCalendar.setTimeZone(TimeZone.getTimeZone(TimeZone.getAvailableIDs(offset)[0]));
+        //int delta = offset - getOffsetWithDaytimeSaving();
+        //timeZoneCalendar.setTimeZone(TimeZone.getTimeZone(TimeZone.getAvailableIDs(offset + delta)[0]));
+    }
+
+    // Format returned YYYY
+    public int getYear() {
+        return timeZoneCalendar.get(Calendar.YEAR);
+    }
+
+    // Format returned {0-11}
+    public int getMonth() {
+        return timeZoneCalendar.get(Calendar.MONTH);
+    }
+
+    // Format returned {1-365}
+    public int getDayOfYear() {
+        return timeZoneCalendar.get(Calendar.DAY_OF_YEAR);
+    }
+
+    // Format returned {1-31}
+    public int getDayOfMonth() {
+        return timeZoneCalendar.get(Calendar.DAY_OF_MONTH);
+    }
+
+    // Format returned {1-7}
+    public int getDayOfWeek() {
+        return timeZoneCalendar.get(Calendar.DAY_OF_WEEK);
+    }
+
+    // Format returned {0-23}
+    public int getHours() {
+        return timeZoneCalendar.get(Calendar.HOUR_OF_DAY);
+    }
+
+    // Format returned {0-59}
+    public int getMinutes() {
+        return timeZoneCalendar.get(Calendar.MINUTE);
+    }
+
+    // Format returned {0-59}
+    public int getSeconds() {
+        return timeZoneCalendar.get(Calendar.SECOND);
+    }
+
+    public void setRelativeTime(long ms) {
+        timeZoneCalendar.setTimeInMillis(ms);
+    }
+
+    // Expected format YYYY
+    public void setYear(int year) {
+        timeZoneCalendar.set(Calendar.YEAR, year);
+    }
+
+    // Expected value {0-11}
+    public void setMonth(int month) {
+        timeZoneCalendar.set(Calendar.MONTH, month);
+    }
+
+    // Expected value {1-365}
+    public void setDayOfYear(int dayOfYear) {
+        timeZoneCalendar.set(Calendar.DAY_OF_YEAR, dayOfYear);
+    }
+
+    // Expected value {1-31}
+    public void setDayOfMonth(int day) {
+        timeZoneCalendar.set(Calendar.DAY_OF_MONTH, day);
+    }
+
+    // Expected value {1-7}
+    public void setDayOfWeek(int day) {
+        timeZoneCalendar.set(Calendar.DAY_OF_WEEK, day);
+    }
+
+    // Expected value {0-23}
+    public void setHours(int hours) {
+        timeZoneCalendar.set(Calendar.HOUR_OF_DAY, hours);
+    }
+
+    // Expected value {0-59}
+    public void setMinutes(int min) {
+        timeZoneCalendar.set(Calendar.MINUTE, min);
+    }
+
+    // Expected value {0-59}
+    public void setSeconds(int seconds) {
+        timeZoneCalendar.set(Calendar.SECOND, seconds);
+    }
+
+    public ClassAdTime() {
+        this.isAbsolute = true;
+        this.timeZoneCalendar = Calendar.getInstance();
+        this.timeZoneCalendar.setTimeInMillis(0);
+    }
+
+    public ClassAdTime(String timeZoneId) {
+        this.isAbsolute = true;
+        this.timeZoneCalendar = Calendar.getInstance(TimeZone.getTimeZone(timeZoneId));
+        this.timeZoneCalendar.setTimeInMillis(0);
+    }
+
+    public ClassAdTime(long ms, boolean isAbsolute) {
+        this.isAbsolute = isAbsolute;
+        this.timeZoneCalendar = Calendar.getInstance();
+        this.timeZoneCalendar.setTimeInMillis(ms);
+    }
+
+    public ClassAdTime(boolean isAbsolute) {
+        this.isAbsolute = isAbsolute;
+        this.timeZoneCalendar = Calendar.getInstance();
+        this.timeZoneCalendar.setTimeInMillis(System.currentTimeMillis());
+    }
+
+    //int i is in seconds
+    public ClassAdTime(long ms, int i) {
+        this.isAbsolute = true;
+        this.timeZoneCalendar = Calendar.getInstance(TimeZone.getTimeZone(TimeZone.getAvailableIDs(i)[0]));
+        this.timeZoneCalendar.setTimeInMillis(ms);
+    }
+
+    public void setValue(ClassAdTime t) {
+        this.isAbsolute = t.isAbsolute;
+        this.timeZoneCalendar.setTimeZone(t.timeZoneCalendar.getTimeZone());
+        this.timeZoneCalendar.setTimeInMillis(t.timeZoneCalendar.getTimeInMillis());
+    }
+
+    public void reset() {
+        this.isAbsolute = true;
+        this.timeZoneCalendar.setTimeInMillis(System.currentTimeMillis());
+    }
+
+    public void makeAbsolute(boolean absolute) {
+        this.isAbsolute = absolute;
+    }
+
+    public void setValue(long secs) {
+        this.timeZoneCalendar.setTimeInMillis(secs);
+    }
+
+    public void setValue(long secs, boolean absolute) {
+        this.isAbsolute = absolute;
+        this.timeZoneCalendar.setTimeInMillis(secs);
+    }
+
+    // Equality requires both the same instant in milliseconds and the same isAbsolute flag.
+    @Override
+    public boolean equals(Object t) {
+        if (t instanceof ClassAdTime) {
+            return (((ClassAdTime) t).timeZoneCalendar.getTimeInMillis() == timeZoneCalendar.getTimeInMillis()
+                    && ((ClassAdTime) t).isAbsolute == isAbsolute);
+        }
+        return false;
+    }
+
+    public ClassAdTime subtract(ClassAdTime t) {
+        return new ClassAdTime(timeZoneCalendar.getTimeInMillis() - t.timeZoneCalendar.getTimeInMillis(), isAbsolute);
+    }
+
+    public void makeLocalAbsolute() {
+        this.isAbsolute = true;
+    }
+
+    public void fromAbsoluteToRelative() {
+        this.isAbsolute = false;
+    }
+
+    public void fromRelativeToAbsolute() {
+        this.isAbsolute = true;
+    }
+
+    public int getOffset() {
+        return timeZoneCalendar.getTimeZone().getRawOffset();//(timeZoneCalendar.getTimeInMillis());
+    }
+
+    /*
+        public int getOffsetWithDaytimeSaving() {
+            return timeZoneCalendar.getTimeZone().getOffset((timeZoneCalendar.getTimeInMillis()));
+        }
+    */
+    public void setEpochTime() {
+        this.timeZoneCalendar.setTimeInMillis(0);
+        this.isAbsolute = true;
+    }
+
+    public ClassAdTime plus(long relativeTime, boolean absolute) {
+        return new ClassAdTime(timeZoneCalendar.getTimeInMillis() + relativeTime, absolute);
+    }
+
+    public ClassAdTime subtract(ClassAdTime t, boolean b) {
+        return new ClassAdTime(timeZoneCalendar.getTimeInMillis() - t.timeZoneCalendar.getTimeInMillis(), b);
+    }
+
+    public ClassAdTime multiply(long longValue, boolean b) {
+        return new ClassAdTime(timeZoneCalendar.getTimeInMillis() * longValue, b);
+    }
+
+    public ClassAdTime divide(long longValue, boolean b) {
+        return new ClassAdTime(timeZoneCalendar.getTimeInMillis() / longValue, b);
+    }
+
+    public void setDefaultTimeZone() {
+        this.timeZoneCalendar.setTimeZone(Calendar.getInstance().getTimeZone());
+    }
+
+    public long getTimeInMillis() {
+        return timeZoneCalendar.getTimeInMillis();
+    }
+
+    public void setValue(Calendar instance, ClassAdTime now) {
+        this.timeZoneCalendar = instance;
+        this.timeZoneCalendar.setTimeInMillis(now.getTimeInMillis());
+    }
+
+    public ClassAdTime getGMTCopy() {
+        return new ClassAdTime(timeZoneCalendar.getTimeInMillis(), 0);
+    }
+
+    public long getTime() {
+        return timeZoneCalendar.getTimeInMillis();
+    }
+
+    public void isAbsolute(boolean b) {
+        this.isAbsolute = b;
+    }
+
+    public Calendar getCalendar() {
+        return timeZoneCalendar;
+    }
+
+}
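
As the class comment notes, a ClassAdTime is either a relative duration, where only the millisecond count matters, or an absolute timestamp, where the milliseconds are interpreted against the associated Calendar and TimeZone. A small hypothetical usage sketch, restricted to the constructors and accessors defined above (everything outside those names is illustrative only):

    import org.apache.asterix.external.classad.ClassAdTime;

    public class ClassAdTimeExample {
        public static void main(String[] args) {
            ClassAdTime span = new ClassAdTime(90_000L, false);         // 90 seconds, relative
            ClassAdTime now = new ClassAdTime(true);                    // current instant, absolute
            ClassAdTime later = now.plus(span.getRelativeTime(), true); // shift "now" by the span
            // Calendar-style accessors (getYear, getHours, ...) are only meaningful
            // for absolute values, where the time zone applies.
            System.out.println(later.getYear() + " " + later.getHours() + ":" + later.getMinutes());
        }
    }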


[29/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/data/external-parser/jobads.txt
----------------------------------------------------------------------
diff --git a/asterix-app/data/external-parser/jobads.txt b/asterix-app/data/external-parser/jobads.txt
new file mode 100644
index 0000000..2ca4919
--- /dev/null
+++ b/asterix-app/data/external-parser/jobads.txt
@@ -0,0 +1,12869 @@
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446112223; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.179100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2696692,ChtcWrapper159.out,AuditLog.159,simu_3_159.txt,harvest.log,159.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.195400000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/159"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134176; 
+        LastMatchTime = 1446112222; 
+        LastJobLeaseRenewal = 1446134176; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582557; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=159 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134177; 
+        QDate = 1446105741; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/159/process.log"; 
+        JobCurrentStartDate = 1446112222; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "159+159"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 21954; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.152"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134176; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446112222; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582557.0#1446105741"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.195400000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e352.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/159/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125604; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.152:39021>#1444772294#9281#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 159+159"; 
+        CumulativeSlotTime = 2.195400000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 21953; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446111648; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.235300000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_818403,ChtcWrapper211.out,AuditLog.211,simu_3_211.txt,harvest.log,211.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 2.252000000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.060300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134167; 
+        QDate = 1446105734; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134167; 
+        LastMatchTime = 1446111647; 
+        LastJobLeaseRenewal = 1446134167; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582533; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211/process.log"; 
+        JobCurrentStartDate = 1446111647; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=211 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "211+211"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 22520; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.61"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134167; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446111647; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582533.0#1446105734"; 
+        RemoteSysCpu = 1.370000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.252000000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e261.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/211/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 126608; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.61:49736>#1444759807#6759#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 211+211"; 
+        CumulativeSlotTime = 2.252000000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 22519; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446134109; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.400000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,CURLTIME_137795,ChtcWrapper11021.out,R2011b_INFO,AuditLog.11021,SLIBS2.tar.gz,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5124; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727270000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        RecentStatsLifetimeStarter = 48; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134165; 
+        QDate = 1446134012; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134165; 
+        LastMatchTime = 1446134107; 
+        LastJobLeaseRenewal = 1446134165; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49584018; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/11021/process.log"; 
+        JobCurrentStartDate = 1446134107; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=11021 --"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "11021+11021"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.39"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134165; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446134107; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49584018.0#1446134012"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 14; 
+        LastRemoteHost = "slot1@e239.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/11021/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5124; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.39:54850>#1445038698#5043#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 11021+11021"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 14; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1139127; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 56; 
+        ImageSize = 7500; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        WantGlidein = true; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/11021"
+    ]
+
+    [
+        BlockWrites = 4; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446108996; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 2.477600000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.850540000000000E+05; 
+        ResidentSetSize = 100000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,ChtcWrapper407.out,AuditLog.407,CURLTIME_1861323,407.out,simu_3_407.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 123648; 
+        RemoteWallClockTime = 2.513300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.056100000000000E+04; 
+        LastRejMatchReason = "PREEMPTION_REQUIREMENTS == False "; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 3976; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 30280; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/407"; 
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134128; 
+        LastMatchTime = 1446108995; 
+        LastJobLeaseRenewal = 1446134128; 
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582261; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=407 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134128; 
+        QDate = 1446105631; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/407/process.log"; 
+        JobCurrentStartDate = 1446108995; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "407+407"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 25133; 
+        AutoClusterId = 38210; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 16; 
+        ExitBySignal = false; 
+        DAGManJobId = 49581933; 
+        EnteredCurrentStatus = 1446134128; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446108995; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582261.0#1446105631"; 
+        RemoteSysCpu = 2.770000000000000E+02; 
+        LastRejMatchTime = 1446108994; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 2.513300000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 906; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/407/,/home/xguo23/finally_2/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 76112; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1604#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 407+407"; 
+        CumulativeSlotTime = 2.513300000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 313; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 25132; 
+        ImageSize = 125000
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446121054; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.293400000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_37424,ChtcWrapper409.out,AuditLog.409,simu_3_409.txt,harvest.log,409.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.305100000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.787300000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409"; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134104; 
+        LastMatchTime = 1446121053; 
+        LastJobLeaseRenewal = 1446134104; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49583239; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=409 -- 3"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134104; 
+        QDate = 1446106003; 
+        JobLeaseDuration = 2400; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409/process.log"; 
+        JobCurrentStartDate = 1446121053; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "409+409"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 13051; 
+        AutoClusterId = 24; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.242"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134104; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446121053; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583239.0#1446106003"; 
+        RemoteSysCpu = 9.300000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.305100000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e442.chtc.WISC.EDU"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/409/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 127216; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.242:38884>#1443991450#10456#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 409+409"; 
+        CumulativeSlotTime = 1.305100000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 13050; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1445943853; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.852360000000000E+05; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.843670000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.902470000000000E+05; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.076600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134099; 
+        QDate = 1445938922; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134099; 
+        LastMatchTime = 1445943852; 
+        LastJobLeaseRenewal = 1446134099; 
+        DAGManNodesLog = "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49573720; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/finally/Simulation_condor/model_3/180/process.log"; 
+        JobCurrentStartDate = 1445943852; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "180+180"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 190247; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.72"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49572657; 
+        EnteredCurrentStatus = 1446134099; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1445943852; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49573720.0#1445938922"; 
+        RemoteSysCpu = 1.835000000000000E+03; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.902470000000000E+05; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e272.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 123680; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.72:29075>#1444753997#6000#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 180+180"; 
+        CumulativeSlotTime = 1.902470000000000E+05; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 190245; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/finally/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/finally/Simulation_condor/model_3/180"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446114726; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.908100000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 75000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "harvest.log,232.out,ChtcWrapper232.out,AuditLog.232,CURLTIME_1864147,simu_3_232.txt"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 118772; 
+        RemoteWallClockTime = 1.933800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.791100000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 12; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 26436; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134062; 
+        QDate = 1446105779; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134062; 
+        LastMatchTime = 1446114724; 
+        LastJobLeaseRenewal = 1446134062; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582659; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232/process.log"; 
+        JobCurrentStartDate = 1446114724; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=232 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "232+232"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 19338; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582200; 
+        EnteredCurrentStatus = 1446134062; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446114724; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582659.0#1446105779"; 
+        RemoteSysCpu = 1.790000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.933800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 615; 
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/232/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 71268; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1612#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 232+232"; 
+        CumulativeSlotTime = 1.933800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 3; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216668; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 19336; 
+        ImageSize = 125000; 
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232"
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.200000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "R2011b_INFO,CODEBLOWUP,AuditLog.10012,SLIBS2.tar.gz,ChtcWrapper10012.out,CURLTIME_2575055,chtcinnerwrapper"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5128; 
+        RemoteWallClockTime = 7.700000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727355000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 160; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 160; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/10012"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 67; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134040; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134040; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583905; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=10012 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134040; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/10012/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133888; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "10012+10012"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 77; 
+        AutoClusterId = 38267; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.69"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134040; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583905.0#1446133888"; 
+        RemoteSysCpu = 1.200000000000000E+01; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 7.700000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 12; 
+        LastRemoteHost = "slot1_2@e189.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/10012/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5128; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.69:4177>#1444973293#3769#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 10012+10012"; 
+        CumulativeSlotTime = 7.700000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 12; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1211433; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 76; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115779; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.811800000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.847170000000000E+05; 
+        ResidentSetSize = 150000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_3140097,ChtcWrapper3.out,AuditLog.3,simu_3_3.txt,harvest.log,3.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.824800000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.789600000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134026; 
+        QDate = 1446105835; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134026; 
+        LastMatchTime = 1446115778; 
+        LastJobLeaseRenewal = 1446134026; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582786; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3/process.log"; 
+        JobCurrentStartDate = 1446115778; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=3 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "3+3"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 18248; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.245.107"; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 0; 
+        ExitBySignal = false; 
+        DAGManJobId = 49582778; 
+        EnteredCurrentStatus = 1446134026; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 0; 
+        TransferIn = false; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446115778; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582786.0#1446105835"; 
+        RemoteSysCpu = 1.080000000000000E+02; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 1.824800000000000E+04; 
+        WantCheckpoint = false; 
+        BlockReads = 0; 
+        LastRemoteHost = "slot1@e307.chtc.wisc.edu"; 
+        TransferInput = "/home/xguo23/model_3_1.46/Simulation_condor/data/3/,/home/xguo23/model_3_1.46/Simulation_condor/data/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 1000000; 
+        ResidentSetSize_RAW = 125940; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.245.107:63744>#1444685448#11070#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 3+3"; 
+        CumulativeSlotTime = 1.824800000000000E+04; 
+        JobRunCount = 1; 
+        RecentBlockReads = 0; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1216669; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 18247; 
+        ImageSize = 1000000; 
+        Cmd = "/home/xguo23/model_3_1.46/Simulation_condor/chtcjobwrapper"; 
+        LocalSysCpu = 0.0; 
+        Iwd = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3"
+    ]
+
+    [
+        BlockWrites = 506; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446133964; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.100000000000000E+01; 
+        NiceUser = false; 
+        BytesRecvd = 1.220270000000000E+06; 
+        RequestMemory = 1000; 
+        ResidentSetSize = 7500; 
+        StreamOut = false; 
+        SpooledOutputFiles = "chtcinnerwrapper,SLIBS2.tar.gz,R2011b_INFO,AuditLog.20111,CURLTIME_1051736,ChtcWrapper20111.out,CODEBLOWUP"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 5056; 
+        RemoteWallClockTime = 5.800000000000000E+01; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 5; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 2.727274000000000E+06; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 164; 
+        TransferInputSizeMB = 1; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 164; 
+        LocalSysCpu = 0.0; 
+        WantGlidein = true; 
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/20111"; 
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper"; 
+        RecentStatsLifetimeStarter = 43; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "dentler"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 506; 
+        CompletionDate = 1446134021; 
+        LastMatchTime = 1446133963; 
+        LastJobLeaseRenewal = 1446134021; 
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log"; 
+        ClusterId = 49583938; 
+        NumJobStarts = 1; 
+        JobUniverse = 5; 
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize"; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=20111 --"; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        JobFinishedHookDone = 1446134021; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "dentler@chtc.wisc.edu"; 
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/20111/process.log"; 
+        JobCurrentStartDate = 1446133963; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        JobLeaseDuration = 2400; 
+        QDate = 1446133922; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "20111+20111"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 
+        NumRestarts = 0; 
+        NumSystemHolds = 0; 
+        CommittedTime = 58; 
+        AutoClusterId = 38259; 
+        ExitStatus = 0; 
+        ShouldTransferFiles = "YES"; 
+        MachineAttrCpus0 = 1; 
+        WantRemoteSyscalls = false; 
+        MyType = "Job"; 
+        CumulativeSuspensionTime = 0; 
+        Rank = 0.0; 
+        StartdPrincipal = "execute-side@matchsession/128.105.244.37"; 
+        WantFlocking = true; 
+        Err = "process.err"; 
+        PeriodicRemove = false; 
+        BlockWriteKbytes = 249656; 
+        ExitBySignal = false; 
+        DAGManJobId = 49583804; 
+        EnteredCurrentStatus = 1446134021; 
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])"; 
+        RecentBlockWriteKbytes = 249656; 
+        TransferIn = false; 
+        IsCHTCSubmit = true; 
+        NumJobMatches = 1; 
+        RootDir = "/"; 
+        JobStartDate = 1446133963; 
+        JobPrio = 0; 
+        CurrentHosts = 0; 
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583938.0#1446133922"; 
+        RemoteSysCpu = 7.000000000000000E+00; 
+        TotalSuspensions = 0; 
+        CommittedSlotTime = 5.800000000000000E+01; 
+        WantCheckpoint = false; 
+        BlockReads = 16; 
+        LastRemoteHost = "slot1_10@e168.chtc.wisc.edu"; 
+        TransferInput = "/home/dentler/ChtcRun/project_auction/20111/,/home/dentler/ChtcRun/project_auction/shared/"; 
+        LocalUserCpu = 0.0; 
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 ); 
+        RequestDisk = 4000000; 
+        ResidentSetSize_RAW = 5056; 
+        OrigMaxHosts = 1; 
+        LastPublicClaimId = "<128.105.244.37:57713>#1445396629#2313#..."; 
+        WantRHEL6 = true; 
+        NumCkpts_RAW = 0; 
+        Out = "process.out"; 
+        SubmitEventNotes = "DAG Node: 20111+20111"; 
+        CumulativeSlotTime = 5.800000000000000E+01; 
+        JobRunCount = 1; 
+        RecentBlockReads = 16; 
+        StreamErr = false; 
+        DiskUsage_RAW = 1205568; 
+        NumCkpts = 0; 
+        StatsLifetimeStarter = 52; 
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0; 
+        LastJobStatus = 2; 
+        JobCurrentStartExecutingDate = 1446115115; 
+        WantRemoteIO = true; 
+        RequestCpus = 1; 
+        NumShadowStarts = 1; 
+        RemoteUserCpu = 1.878200000000000E+04; 
+        NiceUser = false; 
+        RequestMemory = 1000; 
+        BytesRecvd = 2.846290000000000E+05; 
+        ResidentSetSize = 125000; 
+        StreamOut = false; 
+        SpooledOutputFiles = "CURLTIME_2890029,ChtcWrapper260.out,AuditLog.260,simu_3_260.txt,harvest.log,260.out"; 
+        OnExitRemove = true; 
+        ImageSize_RAW = 811948; 
+        RemoteWallClockTime = 1.890300000000000E+04; 
+        MachineAttrSlotWeight0 = 1; 
+        ExecutableSize = 7; 
+        JobStatus = 4; 
+        DAGParentNodeNames = ""; 
+        ExitCode = 0; 
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"; 
+        BytesSent = 3.050700000000000E+04; 
+        LastSuspensionTime = 0; 
+        ExecutableSize_RAW = 6; 
+        RecentBlockReadKbytes = 0; 
+        TransferInputSizeMB = 0; 
+        Matlab = "R2011b"; 
+        BlockReadKbytes = 0; 
+        RecentStatsLifetimeStarter = 1200; 
+        LeaveJobInQueue = false; 
+        TargetType = "Machine"; 
+        WhenToTransferOutput = "ON_EXIT"; 
+        Owner = "xguo23"; 
+        JobNotification = 0; 
+        BufferSize = 524288; 
+        RecentBlockWrites = 0; 
+        CompletionDate = 1446134017; 
+        QDate = 1446105803; 
+        JobLeaseDuration = 2400; 
+        JobFinishedHookDone = 1446134017; 
+        LastMatchTime = 1446115114; 
+        LastJobLeaseRenewal = 1446134017; 
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log"; 
+        ClusterId = 49582724; 
+        JobUniverse = 5; 
+        NumJobStarts = 1; 
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $"; 
+        CoreSize = 0; 
+        OnExitHold = false; 
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"; 
+        In = "/dev/null"; 
+        DiskUsage = 1250000; 
+        EncryptExecuteDirectory = false; 
+        CommittedSuspensionTime = 0; 
+        User = "xguo23@chtc.wisc.edu"; 
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/260/process.log"; 
+        JobCurrentStartDate = 1446115114; 
+        BufferBlockSize = 32768; 
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu"; 
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer ); 
+        MinHosts = 1; 
+        MaxHosts = 1; 
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=260 -- 3"; 
+        PeriodicHold = false; 
+        ProcId = 0; 
+        Environment = ""; 
+        DAGNodeName = "260+260"; 
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 ); 
+        TerminationPending = true; 

<TRUNCATED>


[04/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/resources/jobads.old
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/resources/jobads.old b/asterix-external-data/src/test/resources/jobads.old
new file mode 100644
index 0000000..7a1abd7
--- /dev/null
+++ b/asterix-external-data/src/test/resources/jobads.old
@@ -0,0 +1,1106 @@
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5616@cms"
+JobFinishedHookDone = 1439847319
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 25
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = { "combine_output.tar" }
+ProcId = 0
+CRAB_UserGroup = "dcms"
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439847319
+CRAB_SiteWhitelist = {  }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.out;_condor_stderr=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/73/uscms5616/150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/C=DE/O=GermanGrid/OU=KIT/CN=Andrew Gilbert"
+ClusterId = 1217455
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T2_CH_CERN"
+CompletionDate = 1439847319
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5616"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "agilbert"
+CommittedTime = 0
+X509UserProxy = "63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+QDate = 1439764883
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439764892
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1217455.0#1439764883"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc491"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 82427.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = { "T2_FR_CCIN2P3","T1_IT_CNAF","T1_ES_PIC","T1_UK_RAL","T2_FI_HIP","T2_US_Nebraska" }
+DAG_NodesQueued = 0
+CRAB_JobCount = 25
+JobStartDate = 1439764892
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"splitOnRun\": false, \"events_per_job\": 1}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439764886
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = {  }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/7455/0/cluster1217455.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "prefit_cms_asimov_A1_5D-59ffde2b5d41be5f0c401d0a6a8a0194"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CRAB_JobSW = "CMSSW_7_4_0_pre9"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 82427.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 25
+CRAB_InputData = "/MinBias"
+SUBMIT_x509userproxy = "/data/certs/creds/63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+StreamOut = false
+CRAB_ReqName = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 0
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439764891
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5050@cms"
+JobFinishedHookDone = 1439773907
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 30
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = {  }
+ProcId = 0
+CRAB_UserGroup = undefined
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439773907
+CRAB_SiteWhitelist = { "T3_US_FNALLPC","T2_US_Purdue","T2_US_Nebraska" }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.out;_condor_stderr=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/67/uscms5050/150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ferencek/CN=650164/CN=Dinko Ferencek"
+ClusterId = 1206367
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T3_US_FNALLPC"
+CompletionDate = 1439773907
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+x509userproxyexpiration = 1440294044
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5050"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "ferencek"
+CommittedTime = 0
+X509UserProxy = "3a7798796bc24a800001338917ec45991bcf0a96"
+QDate = 1439615565
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439615574
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1206367.0#1439615565"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc481"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 158333.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = {  }
+DAG_NodesQueued = 0
+CRAB_JobCount = 30
+JobStartDate = 1439615574
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"lheInputFiles\": true, \"splitOnRun\": false, \"events_per_job\": 50000}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439615569
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = { "Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE.root" }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/6367/0/cluster1206367.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "LHE-17521057f93ed9cadf21dd45b3505145"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CRAB_JobSW = "CMSSW_7_1_18"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 158333.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 30
+CRAB_InputData = "/Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8"
+SUBMIT_x509userproxy = "/data/certs/creds/3a7798796bc24a800001338917ec45991bcf0a96"
+StreamOut = false
+CRAB_ReqName = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 1
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439615572
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 2800
+StatsLifetimeStarter = 165949
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "grid_cms"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SubmitEventNotes = "DAG Node: Job53"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+DAGParentNodeNames = ""
+MATCH_GLIDEIN_Site = "CERN"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 163084.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "59069"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+RecentBlockWrites = 0
+CurrentHosts = 0
+MATCH_GLIDEIN_ProcId = 1
+x509UserProxyExpiration = 1440397268
+Iwd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 75000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/128.142.45.103"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "689255460"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job53"
+LastPublicClaimId = "<128.142.45.103:55332>#1439963327#3#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_CH_CERN"
+RemoteSysCpu = 1963.0
+CRAB_Retry = 2
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1238992
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2800"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms5111"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+LastJobLeaseRenewal = 1440131524
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_CH_CERN"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.main"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.kbutanov"
+MATCH_GLIDEIN_SiteWMS_Slot = "Unknown"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms5111@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2800
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440131525
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 1
+MATCH_GLIDEIN_Factory = "gfactory_service"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1233705.0#1439964847"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 59069
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1439965573
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/8123da6528ec4abd24562a99b4f2b0ec556bed0b"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440530096"
+MATCH_EXP_JOB_GLIDEIN_Factory = "gfactory_service"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.main"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 2128005.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=53 --cmsswVersion=CMSSW_7_4_7 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v2/50000/6E2F932B-633B-E511-A7AE-F04DA23BCE4C.root\"] --runAndLumis=job_lumis_53.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.53"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 165965.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_CH_CERN"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 53
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2800"
+MATCH_GLIDEIN_ToRetire = 1440530096
+ImageSize = 4250000
+JobCurrentStartDate = 1439965560
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1439965560
+LastMatchTime = 1439965560
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440564896"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+NumJobReconnects = 2
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SpooledOutputFiles = "jobReport.json.53"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.53"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 165965.0
+JobStatus = 4
+x509UserProxyEmail = "khakimjan.butanov@cern.ch"
+DAGManJobId = 1035690
+RemoteWallClockTime = 165965.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+LastRemoteHost = "glidein_9757_931570227@b635ef6906.cern.ch"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T1_DE_KIT,T2_UK_London_IC,T2_CH_CERN"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 61434
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "1"
+CRAB_localOutputFiles = "stepB_MC.root=stepB_MC_53.root"
+MaxHosts = 1
+CRAB_UserHN = "kbutanov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms5111"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 4095188
+MATCH_EXP_Used_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "grid_cms"
+MATCH_GLIDEIN_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "CERN"
+UserLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM#85bfee36-3b82-11e5-be34-001e67abf518"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 2
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1439964847
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.53"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/log/cmsRun_53.log.tar.gz, srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/stepB_MC_53.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "stepB_MC.root" }
+AutoClusterId = 16275
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439209593
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 2
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "CERN"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_KR_KNU"
+ClusterId = 1233705
+BytesSent = 119952.0
+CRAB_PublishName = "crab_25ns_WJetsToLNu_HT600_800-9da7f68dc2032d8626d7e7822bb10506"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/user/kbutanov.03af76ad04ddc195ee96e6a5469f1bbb1777390d/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_CH_CERN"
+MATCH_GLIDEIN_MaxMemMBs = 2800
+RequestMemory = 2000
+EnteredCurrentStatus = 1440131525
+MATCH_GLIDEIN_SiteWMS = "LSF"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/68/uscms5111/150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "689255460"
+CRAB_JobSW = "CMSSW_7_4_7"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 2800
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "Unknown"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440131525
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440564896
+NiceUser = false
+RootDir = "/"
+CommittedTime = 165965
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "LSF"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 33352
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SubmitEventNotes = "DAG Node: Job4"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 28513.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "2561111"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 8
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 3750000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.182.12"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5092137.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job4"
+LastPublicClaimId = "<129.93.182.12:42491>#1440048812#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 616.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1148372
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+LastJobLeaseRenewal = 1440115142
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440115142
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "OSGGOC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235992.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 2561111
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081789
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440616411"
+MATCH_EXP_JOB_GLIDEIN_Factory = "OSGGOC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=4 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9A89CA60-69FC-E411-9661-0025905C42B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/34F8B66A-D4FB-E411-8F89-842B2B29273C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/7CE6B848-F5FB-E411-A605-0025905A60A8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9E842AA8-54FC-E411-8BC7-000F53273500.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/C44AD465-D4FB-E411-8704-002590200A40.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/90B6CB1B-07FD-E411-BD52-001E67397CBA.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/183FB65F-69FC-E411-A5A8-0025904B7C26.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/5A0A9A0E-EDFB-E411-B95F-00266CF330B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/3E3768F1-61FC-E411-B163-002618943956.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/08DB9DDE-F4FB-E411-9BC9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/CE293F9B-54FC-E411-83E8-AC853D9DACD3.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/A4479F5F-69FC-E411-B0B5-0025904C6378.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/0419455F-D4FB-E411-AEFA-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E6BD6C76-54FC-E411-A1F2-AC853D9DACD7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/AC15F863-F5FB-E411-8F07-002590DB9286.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/CC9B7EE2-F4FB-E411-BCD9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/5844575F-D4FB-E411-81F5-003048FFD732.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/6EC5205E-D4FB-E411-9885-001E67396BB7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/B63200E8-69FC-E411-B949-0025904C51FC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/14554A42-54FC-E411-86D2-0025905A605E.root\"] --runAndLumis=job_lumis_4.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.4"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 33360.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 4
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440616411
+ImageSize = 1750000
+JobCurrentStartDate = 1440081782
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081782
+LastMatchTime = 1440081782
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440651211"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SpooledOutputFiles = "jobReport.json.4"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.4"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 33360.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 33360.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_1936_57194584@red-d8n12.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 3661158
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "8"
+CRAB_localOutputFiles = "results.root=results_4.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1727056
+MATCH_EXP_Used_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.4"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_4.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_4.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235992
+BytesSent = 597241.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440115142
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5092137.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440115142
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440651211
+NiceUser = false
+RootDir = "/"
+CommittedTime = 33360
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 31968
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SubmitEventNotes = "DAG Node: Job3"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 27257.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "3043383"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 14
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 4250000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.183.127"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5096573.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job3"
+LastPublicClaimId = "<129.93.183.127:56441>#1440063351#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 621.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1174388
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+LastJobLeaseRenewal = 1440113502
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440113503
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "SDSC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235991.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 3043383
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081533
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440630710"
+MATCH_EXP_JOB_GLIDEIN_Factory = "SDSC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=3 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/06DE7D5F-D4FB-E411-9C85-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E29E093E-54FC-E411-8AE5-0025905A60FE.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/F0FDF730-EDFB-E411-842B-00261834B51D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/FECCF363-F5FB-E411-85A3-002590DBDFE0.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/0E4CEBFE-ECFB-E411-9F0C-842B2B29273C.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/ECF66DCC-F0FB-E411-84CF-00259074AE32.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/96F29C69-D4FB-E411-9028-842B2B292627.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/6E887F0F-EDFB-E411-875B-BCAEC54B303A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/0C788712-F5FB-E411-AA0E-AC853D9DAC29.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/723A41AE-F4FB-E411-BAA3-0025905C431A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/DA4EA0F5-F4FB-E411-B2AD-00259073E31C.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/98C8F097-F7FB-E411-9A1F-52540006FB8D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/DE4F8235-5FFC-E411-80CD-0025905A6088.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/EA5D6151-F5FB-E411-99F0-0026B92E0C74.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/205D0CF9-F4FB-E411-934D-000F532734AC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/24BCAED9-F0FB-E411-A35B-00259074AE54.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/A4C160C1-F4FB-E411-A66D-B083FED76C6C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E86B536C-54FC-E411-8787-AC853D9DACE1.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/2E68E42D-EDFB-E411-8027-001E67397CC9.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/A056D12B-EDFB-E411-9E51-52540006FDD6.root\"] --runAndLumis=job_lumis_3.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.3"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 31976.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 3
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440630710
+ImageSize = 2000000
+JobCurrentStartDate = 1440081527
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081527
+LastMatchTime = 1440081527
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440665510"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SpooledOutputFiles = "jobReport.json.3"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.3"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 31976.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 31976.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_11321_920434792@red-d23n7.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 4111436
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "14"
+CRAB_localOutputFiles = "results.root=results_3.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1756756
+MATCH_EXP_Used_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.3"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_3.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_3.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235991
+BytesSent = 604821.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440113503
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5096573.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440113503
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440665510
+NiceUser = false
+RootDir = "/"
+CommittedTime = 31976
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+


[32/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/data/external-parser/dropbox/jobads2.txt
----------------------------------------------------------------------
diff --git a/asterix-app/data/external-parser/dropbox/jobads2.txt b/asterix-app/data/external-parser/dropbox/jobads2.txt
new file mode 100644
index 0000000..f420be4
--- /dev/null
+++ b/asterix-app/data/external-parser/dropbox/jobads2.txt
@@ -0,0 +1,12869 @@
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446112223;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 2.179100000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.850540000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_2696692,ChtcWrapper159.out,AuditLog.159,simu_3_159.txt,harvest.log,159.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 2.195400000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.056100000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/159";
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134176;
+        LastMatchTime = 1446112222;
+        LastJobLeaseRenewal = 1446134176;
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582557;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=159 -- 3";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134177;
+        QDate = 1446105741;
+        JobLeaseDuration = 2400;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/159/process.log";
+        JobCurrentStartDate = 1446112222;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "159+159";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 21954;
+        AutoClusterId = 24;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.152";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49581933;
+        EnteredCurrentStatus = 1446134176;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446112222;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582557.0#1446105741";
+        RemoteSysCpu = 1.370000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 2.195400000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e352.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/159/,/home/xguo23/finally_2/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 125604;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.152:39021>#1444772294#9281#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 159+159";
+        CumulativeSlotTime = 2.195400000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 21953;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446111648;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 2.235300000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_818403,ChtcWrapper211.out,AuditLog.211,simu_3_211.txt,harvest.log,211.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 2.252000000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.060300000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134167;
+        QDate = 1446105734;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134167;
+        LastMatchTime = 1446111647;
+        LastJobLeaseRenewal = 1446134167;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582533;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211/process.log";
+        JobCurrentStartDate = 1446111647;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=211 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "211+211";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 22520;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.61";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134167;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446111647;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582533.0#1446105734";
+        RemoteSysCpu = 1.370000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 2.252000000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e261.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/211/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 126608;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.61:49736>#1444759807#6759#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 211+211";
+        CumulativeSlotTime = 2.252000000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 22519;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/211"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446134109;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.400000000000000E+01;
+        NiceUser = false;
+        BytesRecvd = 1.220270000000000E+06;
+        RequestMemory = 1000;
+        ResidentSetSize = 7500;
+        StreamOut = false;
+        SpooledOutputFiles = "chtcinnerwrapper,CURLTIME_137795,ChtcWrapper11021.out,R2011b_INFO,AuditLog.11021,SLIBS2.tar.gz,CODEBLOWUP";
+        OnExitRemove = true;
+        ImageSize_RAW = 5124;
+        RemoteWallClockTime = 5.800000000000000E+01;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 5;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.727270000000000E+06;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 160;
+        TransferInputSizeMB = 1;
+        Matlab = "R2011b";
+        BlockReadKbytes = 160;
+        RecentStatsLifetimeStarter = 48;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "dentler";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134165;
+        QDate = 1446134012;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134165;
+        LastMatchTime = 1446134107;
+        LastJobLeaseRenewal = 1446134165;
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log";
+        ClusterId = 49584018;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "dentler@chtc.wisc.edu";
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/11021/process.log";
+        JobCurrentStartDate = 1446134107;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=11021 --";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "11021+11021";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 58;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.39";
+        WantFlocking = true;
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49583804;
+        EnteredCurrentStatus = 1446134165;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446134107;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49584018.0#1446134012";
+        RemoteSysCpu = 1.200000000000000E+01;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 5.800000000000000E+01;
+        WantCheckpoint = false;
+        BlockReads = 14;
+        LastRemoteHost = "slot1@e239.chtc.wisc.edu";
+        TransferInput = "/home/dentler/ChtcRun/project_auction/11021/,/home/dentler/ChtcRun/project_auction/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 4000000;
+        ResidentSetSize_RAW = 5124;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.39:54850>#1445038698#5043#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 11021+11021";
+        CumulativeSlotTime = 5.800000000000000E+01;
+        JobRunCount = 1;
+        RecentBlockReads = 14;
+        StreamErr = false;
+        DiskUsage_RAW = 1139127;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 56;
+        ImageSize = 7500;
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper";
+        WantGlidein = true;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/11021"
+    ]
+
+    [
+        BlockWrites = 4;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446108996;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 2.477600000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.850540000000000E+05;
+        ResidentSetSize = 100000;
+        StreamOut = false;
+        SpooledOutputFiles = "harvest.log,ChtcWrapper407.out,AuditLog.407,CURLTIME_1861323,407.out,simu_3_407.txt";
+        OnExitRemove = true;
+        ImageSize_RAW = 123648;
+        RemoteWallClockTime = 2.513300000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.056100000000000E+04;
+        LastRejMatchReason = "PREEMPTION_REQUIREMENTS == False ";
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 3976;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 30280;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/finally_2/Simulation_condor/model_3/407";
+        Cmd = "/home/xguo23/finally_2/Simulation_condor/chtcjobwrapper";
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134128;
+        LastMatchTime = 1446108995;
+        LastJobLeaseRenewal = 1446134128;
+        DAGManNodesLog = "/home/xguo23/finally_2/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582261;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=407 -- 3";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134128;
+        QDate = 1446105631;
+        JobLeaseDuration = 2400;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/finally_2/Simulation_condor/model_3/407/process.log";
+        JobCurrentStartDate = 1446108995;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "407+407";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 25133;
+        AutoClusterId = 38210;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 16;
+        ExitBySignal = false;
+        DAGManJobId = 49581933;
+        EnteredCurrentStatus = 1446134128;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446108995;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582261.0#1446105631";
+        RemoteSysCpu = 2.770000000000000E+02;
+        LastRejMatchTime = 1446108994;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 2.513300000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 906;
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/finally_2/Simulation_condor/data/407/,/home/xguo23/finally_2/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 76112;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1604#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 407+407";
+        CumulativeSlotTime = 2.513300000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 313;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 25132;
+        ImageSize = 125000
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446121054;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.293400000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_37424,ChtcWrapper409.out,AuditLog.409,simu_3_409.txt,harvest.log,409.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.305100000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.787300000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409";
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134104;
+        LastMatchTime = 1446121053;
+        LastJobLeaseRenewal = 1446134104;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49583239;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=409 -- 3";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134104;
+        QDate = 1446106003;
+        JobLeaseDuration = 2400;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/409/process.log";
+        JobCurrentStartDate = 1446121053;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "409+409";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 13051;
+        AutoClusterId = 24;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.242";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134104;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446121053;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583239.0#1446106003";
+        RemoteSysCpu = 9.300000000000000E+01;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.305100000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e442.chtc.WISC.EDU";
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/409/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 127216;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.242:38884>#1443991450#10456#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 409+409";
+        CumulativeSlotTime = 1.305100000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 13050;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1445943853;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.852360000000000E+05;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.843670000000000E+05;
+        ResidentSetSize = 125000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_3753852,ChtcWrapper180.out,AuditLog.180,simu_3_180.txt,harvest.log,180.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.902470000000000E+05;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.076600000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134099;
+        QDate = 1445938922;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134099;
+        LastMatchTime = 1445943852;
+        LastJobLeaseRenewal = 1446134099;
+        DAGManNodesLog = "/home/xguo23/finally/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49573720;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/finally/Simulation_condor/model_3/180/process.log";
+        JobCurrentStartDate = 1445943852;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=180 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "180+180";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 190247;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.72";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49572657;
+        EnteredCurrentStatus = 1446134099;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1445943852;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49573720.0#1445938922";
+        RemoteSysCpu = 1.835000000000000E+03;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.902470000000000E+05;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e272.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/finally/Simulation_condor/data/180/,/home/xguo23/finally/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 123680;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.72:29075>#1444753997#6000#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 180+180";
+        CumulativeSlotTime = 1.902470000000000E+05;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 190245;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/finally/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/finally/Simulation_condor/model_3/180"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446114726;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.908100000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 75000;
+        StreamOut = false;
+        SpooledOutputFiles = "harvest.log,232.out,ChtcWrapper232.out,AuditLog.232,CURLTIME_1864147,simu_3_232.txt";
+        OnExitRemove = true;
+        ImageSize_RAW = 118772;
+        RemoteWallClockTime = 1.933800000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.791100000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 12;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 26436;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134062;
+        QDate = 1446105779;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134062;
+        LastMatchTime = 1446114724;
+        LastJobLeaseRenewal = 1446134062;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582659;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232/process.log";
+        JobCurrentStartDate = 1446114724;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=232 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "232+232";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 19338;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.104.55.48";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134062;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446114724;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582659.0#1446105779";
+        RemoteSysCpu = 1.790000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.933800000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 615;
+        LastRemoteHost = "slot1@c029.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/model_3_1.47/Simulation_condor/data/232/,/home/xguo23/model_3_1.47/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 71268;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.104.55.48:26476>#1445344800#1612#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 232+232";
+        CumulativeSlotTime = 1.933800000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 3;
+        StreamErr = false;
+        DiskUsage_RAW = 1216668;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 19336;
+        ImageSize = 125000;
+        Cmd = "/home/xguo23/model_3_1.47/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/232"
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446133964;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.200000000000000E+01;
+        NiceUser = false;
+        BytesRecvd = 1.220270000000000E+06;
+        RequestMemory = 1000;
+        ResidentSetSize = 7500;
+        StreamOut = false;
+        SpooledOutputFiles = "R2011b_INFO,CODEBLOWUP,AuditLog.10012,SLIBS2.tar.gz,ChtcWrapper10012.out,CURLTIME_2575055,chtcinnerwrapper";
+        OnExitRemove = true;
+        ImageSize_RAW = 5128;
+        RemoteWallClockTime = 7.700000000000000E+01;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 5;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.727355000000000E+06;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 160;
+        TransferInputSizeMB = 1;
+        Matlab = "R2011b";
+        BlockReadKbytes = 160;
+        LocalSysCpu = 0.0;
+        WantGlidein = true;
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/10012";
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper";
+        RecentStatsLifetimeStarter = 67;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "dentler";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134040;
+        LastMatchTime = 1446133963;
+        LastJobLeaseRenewal = 1446134040;
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log";
+        ClusterId = 49583905;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=10012 --";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134040;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "dentler@chtc.wisc.edu";
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/10012/process.log";
+        JobCurrentStartDate = 1446133963;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        JobLeaseDuration = 2400;
+        QDate = 1446133888;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "10012+10012";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 77;
+        AutoClusterId = 38267;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.244.69";
+        WantFlocking = true;
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49583804;
+        EnteredCurrentStatus = 1446134040;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446133963;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583905.0#1446133888";
+        RemoteSysCpu = 1.200000000000000E+01;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 7.700000000000000E+01;
+        WantCheckpoint = false;
+        BlockReads = 12;
+        LastRemoteHost = "slot1_2@e189.chtc.wisc.edu";
+        TransferInput = "/home/dentler/ChtcRun/project_auction/10012/,/home/dentler/ChtcRun/project_auction/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 4000000;
+        ResidentSetSize_RAW = 5128;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.244.69:4177>#1444973293#3769#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 10012+10012";
+        CumulativeSlotTime = 7.700000000000000E+01;
+        JobRunCount = 1;
+        RecentBlockReads = 12;
+        StreamErr = false;
+        DiskUsage_RAW = 1211433;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 76;
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446115779;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.811800000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.847170000000000E+05;
+        ResidentSetSize = 150000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_3140097,ChtcWrapper3.out,AuditLog.3,simu_3_3.txt,harvest.log,3.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.824800000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.789600000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134026;
+        QDate = 1446105835;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134026;
+        LastMatchTime = 1446115778;
+        LastJobLeaseRenewal = 1446134026;
+        DAGManNodesLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582786;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3/process.log";
+        JobCurrentStartDate = 1446115778;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=3 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "3+3";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 18248;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.107";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582778;
+        EnteredCurrentStatus = 1446134026;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446115778;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582786.0#1446105835";
+        RemoteSysCpu = 1.080000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.824800000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRemoteHost = "slot1@e307.chtc.wisc.edu";
+        TransferInput = "/home/xguo23/model_3_1.46/Simulation_condor/data/3/,/home/xguo23/model_3_1.46/Simulation_condor/data/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 1000000;
+        ResidentSetSize_RAW = 125940;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.245.107:63744>#1444685448#11070#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 3+3";
+        CumulativeSlotTime = 1.824800000000000E+04;
+        JobRunCount = 1;
+        RecentBlockReads = 0;
+        StreamErr = false;
+        DiskUsage_RAW = 1216669;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 18247;
+        ImageSize = 1000000;
+        Cmd = "/home/xguo23/model_3_1.46/Simulation_condor/chtcjobwrapper";
+        LocalSysCpu = 0.0;
+        Iwd = "/home/xguo23/model_3_1.46/Simulation_condor/model_3/3"
+    ]
+
+    [
+        BlockWrites = 506;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446133964;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.100000000000000E+01;
+        NiceUser = false;
+        BytesRecvd = 1.220270000000000E+06;
+        RequestMemory = 1000;
+        ResidentSetSize = 7500;
+        StreamOut = false;
+        SpooledOutputFiles = "chtcinnerwrapper,SLIBS2.tar.gz,R2011b_INFO,AuditLog.20111,CURLTIME_1051736,ChtcWrapper20111.out,CODEBLOWUP";
+        OnExitRemove = true;
+        ImageSize_RAW = 5056;
+        RemoteWallClockTime = 5.800000000000000E+01;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 5;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 2.727274000000000E+06;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 164;
+        TransferInputSizeMB = 1;
+        Matlab = "R2011b";
+        BlockReadKbytes = 164;
+        LocalSysCpu = 0.0;
+        WantGlidein = true;
+        Iwd = "/home/dentler/ChtcRun/project_auction/results_fix2/20111";
+        Cmd = "/home/dentler/ChtcRun/chtcjobwrapper";
+        RecentStatsLifetimeStarter = 43;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "dentler";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 506;
+        CompletionDate = 1446134021;
+        LastMatchTime = 1446133963;
+        LastJobLeaseRenewal = 1446134021;
+        DAGManNodesLog = "/home/dentler/ChtcRun/project_auction/results_fix2/./mydag.dag.nodes.log";
+        ClusterId = 49583938;
+        NumJobStarts = 1;
+        JobUniverse = 5;
+        AutoClusterAttrs = "JobUniverse,LastCheckpointPlatform,NumCkpts,ClientMachine,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestGPUs,_condor_RequestMemory,RequestCpus,RequestDisk,RequestGPUs,RequestMemory,BIOCHEM,MachineLastMatchTime,ConcurrencyLimits,NiceUser,Rank,Requirements,ImageSize,MemoryRequirements,User,RemoteGroup,SubmitterGroup,SubmitterUserPrio,Group,WIDsTheme,InteractiveJob,Is_Resumable,WantFlocking,WantGlidein,Scheduler,Owner,JobStart,MemoryUsage,IsExpressQueueJob,DiskUsage,HEP_VO,IsDesktop,OSG_VO,x509userproxysubject,PassedTest,IsLocalCMSJob,IsLocalCMSSlot,IsSAMSlot,IsSAMJob,MaxDiskTempC,IsDedicated,estimated_run_hours,IsCHTCSubmit,RequiresCVMFS,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot2_ExpectedMachineGracefulDrainingCompletion,Slot2_JobStarts,Slot2_SelfMonitorAge,Slot3_ExpectedMachineGracefulDrainingCompletion,Slot3_JobStarts,Slot3_SelfMonitorAge,Slot4_ExpectedMachineGracefulDr
 ainingCompletion,Slot4_JobStarts,Slot4_SelfMonitorAge,Slot5_ExpectedMachineGracefulDrainingCompletion,Slot5_JobStarts,Slot5_SelfMonitorAge,Slot6_ExpectedMachineGracefulDrainingCompletion,Slot6_JobStarts,Slot6_SelfMonitorAge,Slot7_ExpectedMachineGracefulDrainingCompletion,Slot7_JobStarts,Slot7_SelfMonitorAge,Slot8_ExpectedMachineGracefulDrainingCompletion,Slot8_JobStarts,Slot8_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,Slot2_TotalTimeClaimedBusy,Slot2_TotalTimeUnclaimedIdle,Slot3_TotalTimeClaimedBusy,Slot3_TotalTimeUnclaimedIdle,Slot4_TotalTimeClaimedBusy,Slot4_TotalTimeUnclaimedIdle,Slot5_TotalTimeClaimedBusy,Slot5_TotalTimeUnclaimedIdle,Slot6_TotalTimeClaimedBusy,Slot6_TotalTimeUnclaimedIdle,Slot7_TotalTimeClaimedBusy,Slot7_TotalTimeUnclaimedIdle,Slot8_TotalTimeClaimedBusy,Slot8_TotalTimeUnclaimedIdle,Slot10_ExpectedMachineGracefulDrainingCompletion,Slot10_JobStarts,Slot10_SelfMonitorAge,Slot11_ExpectedMachineGracefulDrainingCompletion,Slot11_JobStarts,S
 lot11_SelfMonitorAge,Slot12_ExpectedMachineGracefulDrainingCompletion,Slot12_JobStarts,Slot12_SelfMonitorAge,Slot9_ExpectedMachineGracefulDrainingCompletion,Slot9_JobStarts,Slot9_SelfMonitorAge,Slot12_TotalTimeClaimedBusy,Slot10_TotalTimeClaimedBusy,Slot10_TotalTimeUnclaimedIdle,Slot11_TotalTimeClaimedBusy,Slot11_TotalTimeUnclaimedIdle,Slot12_TotalTimeUnclaimedIdle,Slot9_TotalTimeClaimedBusy,Slot9_TotalTimeUnclaimedIdle,Slot13_ExpectedMachineGracefulDrainingCompletion,Slot13_JobStarts,Slot13_SelfMonitorAge,Slot14_ExpectedMachineGracefulDrainingCompletion,Slot14_JobStarts,Slot14_SelfMonitorAge,Slot15_ExpectedMachineGracefulDrainingCompletion,Slot15_JobStarts,Slot15_SelfMonitorAge,Slot16_ExpectedMachineGracefulDrainingCompletion,Slot16_JobStarts,Slot16_SelfMonitorAge,IsResumable,WHEN_TO_TRANSFER_OUTPUT,_condor_Requestadmin_mutex_1,_condor_Requestadmin_mutex_2,_condor_Requestadmin_mutex_3,_condor_Requestmachine_token,Requestadmin_mutex_1,Requestadmin_mutex_2,Requestadmin_mutex_3,Reques
 tmachine_token,nyehle,IsBuildJob,IsMatlabBuildJob,TotalJobRunTime,NodeOnline,Slot13_TotalTimeClaimedBusy,Slot13_TotalTimeUnclaimedIdle,Slot14_TotalTimeClaimedBusy,Slot14_TotalTimeUnclaimedIdle,Slot15_TotalTimeClaimedBusy,Slot15_TotalTimeUnclaimedIdle,Slot16_TotalTimeClaimedBusy,Slot16_TotalTimeUnclaimedIdle,TmpIsFull,trResumable,RequiresCMSFrontier,Slot17_ExpectedMachineGracefulDrainingCompletion,Slot17_JobStarts,Slot17_SelfMonitorAge,Slot17_TotalTimeClaimedBusy,Slot17_TotalTimeUnclaimedIdle,Slot18_ExpectedMachineGracefulDrainingCompletion,Slot18_JobStarts,Slot18_SelfMonitorAge,Slot18_TotalTimeClaimedBusy,Slot18_TotalTimeUnclaimedIdle,Slot19_ExpectedMachineGracefulDrainingCompletion,Slot19_JobStarts,Slot19_SelfMonitorAge,Slot19_TotalTimeClaimedBusy,Slot19_TotalTimeUnclaimedIdle,Slot20_ExpectedMachineGracefulDrainingCompletion,Slot20_JobStarts,Slot20_SelfMonitorAge,Slot20_TotalTimeClaimedBusy,Slot20_TotalTimeUnclaimedIdle,Slot21_ExpectedMachineGracefulDrainingCompletion,Slot21_JobSta
 rts,Slot21_SelfMonitorAge,Slot21_TotalTimeClaimedBusy,Slot21_TotalTimeUnclaimedIdle,Slot22_ExpectedMachineGracefulDrainingCompletion,Slot22_JobStarts,Slot22_SelfMonitorAge,Slot22_TotalTimeClaimedBusy,Slot22_TotalTimeUnclaimedIdle,Slot23_ExpectedMachineGracefulDrainingCompletion,Slot23_JobStarts,Slot23_SelfMonitorAge,Slot23_TotalTimeClaimedBusy,Slot23_TotalTimeUnclaimedIdle,Slot24_ExpectedMachineGracefulDrainingCompletion,Slot24_JobStarts,Slot24_SelfMonitorAge,Slot24_TotalTimeClaimedBusy,Slot24_TotalTimeUnclaimedIdle,Slot25_ExpectedMachineGracefulDrainingCompletion,Slot25_JobStarts,Slot25_SelfMonitorAge,Slot25_TotalTimeClaimedBusy,Slot25_TotalTimeUnclaimedIdle,Slot26_ExpectedMachineGracefulDrainingCompletion,Slot26_JobStarts,Slot26_SelfMonitorAge,Slot26_TotalTimeClaimedBusy,Slot26_TotalTimeUnclaimedIdle,Slot27_ExpectedMachineGracefulDrainingCompletion,Slot27_JobStarts,Slot27_SelfMonitorAge,Slot27_TotalTimeClaimedBusy,Slot27_TotalTimeUnclaimedIdle,Slot28_ExpectedMachineGracefulDrainin
 gCompletion,Slot28_JobStarts,Slot28_SelfMonitorAge,Slot28_TotalTimeClaimedBusy,Slot28_TotalTimeUnclaimedIdle,Slot29_ExpectedMachineGracefulDrainingCompletion,Slot29_JobStarts,Slot29_SelfMonitorAge,Slot29_TotalTimeClaimedBusy,Slot29_TotalTimeUnclaimedIdle,Slot30_ExpectedMachineGracefulDrainingCompletion,Slot30_JobStarts,Slot30_SelfMonitorAge,Slot30_TotalTimeClaimedBusy,Slot30_TotalTimeUnclaimedIdle,Slot31_ExpectedMachineGracefulDrainingCompletion,Slot31_JobStarts,Slot31_SelfMonitorAge,Slot31_TotalTimeClaimedBusy,Slot31_TotalTimeUnclaimedIdle,Slot32_ExpectedMachineGracefulDrainingCompletion,Slot32_JobStarts,Slot32_SelfMonitorAge,Slot32_TotalTimeClaimedBusy,Slot32_TotalTimeUnclaimedIdle,ResidentSetSize";
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=net_est --unique=20111 --";
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        JobFinishedHookDone = 1446134021;
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "dentler@chtc.wisc.edu";
+        UserLog = "/home/dentler/ChtcRun/project_auction/results_fix2/20111/process.log";
+        JobCurrentStartDate = 1446133963;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        JobLeaseDuration = 2400;
+        QDate = 1446133922;
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "20111+20111";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 58;
+        AutoClusterId = 38259;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.244.37";
+        WantFlocking = true;
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 249656;
+        ExitBySignal = false;
+        DAGManJobId = 49583804;
+        EnteredCurrentStatus = 1446134021;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 249656;
+        TransferIn = false;
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446133963;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49583938.0#1446133922";
+        RemoteSysCpu = 7.000000000000000E+00;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 5.800000000000000E+01;
+        WantCheckpoint = false;
+        BlockReads = 16;
+        LastRemoteHost = "slot1_10@e168.chtc.wisc.edu";
+        TransferInput = "/home/dentler/ChtcRun/project_auction/20111/,/home/dentler/ChtcRun/project_auction/shared/";
+        LocalUserCpu = 0.0;
+        PeriodicRelease = ( JobStatus == 5 ) && ( ( CurrentTime - EnteredCurrentStatus ) > 1800 ) && ( JobRunCount < 5 ) && ( HoldReasonCode != 6 ) && ( HoldReasonCode != 14 ) && ( HoldReasonCode != 22 );
+        RequestDisk = 4000000;
+        ResidentSetSize_RAW = 5056;
+        OrigMaxHosts = 1;
+        LastPublicClaimId = "<128.105.244.37:57713>#1445396629#2313#...";
+        WantRHEL6 = true;
+        NumCkpts_RAW = 0;
+        Out = "process.out";
+        SubmitEventNotes = "DAG Node: 20111+20111";
+        CumulativeSlotTime = 5.800000000000000E+01;
+        JobRunCount = 1;
+        RecentBlockReads = 16;
+        StreamErr = false;
+        DiskUsage_RAW = 1205568;
+        NumCkpts = 0;
+        StatsLifetimeStarter = 52;
+        ImageSize = 7500
+    ]
+
+    [
+        BlockWrites = 0;
+        LastJobStatus = 2;
+        JobCurrentStartExecutingDate = 1446115115;
+        WantRemoteIO = true;
+        RequestCpus = 1;
+        NumShadowStarts = 1;
+        RemoteUserCpu = 1.878200000000000E+04;
+        NiceUser = false;
+        RequestMemory = 1000;
+        BytesRecvd = 2.846290000000000E+05;
+        ResidentSetSize = 125000;
+        StreamOut = false;
+        SpooledOutputFiles = "CURLTIME_2890029,ChtcWrapper260.out,AuditLog.260,simu_3_260.txt,harvest.log,260.out";
+        OnExitRemove = true;
+        ImageSize_RAW = 811948;
+        RemoteWallClockTime = 1.890300000000000E+04;
+        MachineAttrSlotWeight0 = 1;
+        ExecutableSize = 7;
+        JobStatus = 4;
+        DAGParentNodeNames = "";
+        ExitCode = 0;
+        DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27";
+        BytesSent = 3.050700000000000E+04;
+        LastSuspensionTime = 0;
+        ExecutableSize_RAW = 6;
+        RecentBlockReadKbytes = 0;
+        TransferInputSizeMB = 0;
+        Matlab = "R2011b";
+        BlockReadKbytes = 0;
+        RecentStatsLifetimeStarter = 1200;
+        LeaveJobInQueue = false;
+        TargetType = "Machine";
+        WhenToTransferOutput = "ON_EXIT";
+        Owner = "xguo23";
+        JobNotification = 0;
+        BufferSize = 524288;
+        RecentBlockWrites = 0;
+        CompletionDate = 1446134017;
+        QDate = 1446105803;
+        JobLeaseDuration = 2400;
+        JobFinishedHookDone = 1446134017;
+        LastMatchTime = 1446115114;
+        LastJobLeaseRenewal = 1446134017;
+        DAGManNodesLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/./mydag.dag.nodes.log";
+        ClusterId = 49582724;
+        JobUniverse = 5;
+        NumJobStarts = 1;
+        CondorVersion = "$CondorVersion: 8.5.0 Sep 16 2015 BuildID: 341710 $";
+        CoreSize = 0;
+        OnExitHold = false;
+        CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $";
+        In = "/dev/null";
+        DiskUsage = 1250000;
+        EncryptExecuteDirectory = false;
+        CommittedSuspensionTime = 0;
+        User = "xguo23@chtc.wisc.edu";
+        UserLog = "/home/xguo23/model_3_1.47/Simulation_condor/model_3/260/process.log";
+        JobCurrentStartDate = 1446115114;
+        BufferBlockSize = 32768;
+        MATCH_EXP_JOBGLIDEIN_ResourceName = "wisc.edu";
+        Requirements = ( ( OpSysMajorVer is 6 ) ) && ( MY.JobUniverse == 12 || MY.JobUniverse == 7 || MY.WantFlocking || MY.WantGlidein || TARGET.PoolName == "CHTC" || TARGET.COLLECTOR_HOST_STRING == "infopool.cs.wisc.edu" ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer );
+        MinHosts = 1;
+        MaxHosts = 1;
+        Args = "--type=Matlab --version=R2011b --cmdtorun=simu_condor --unique=260 -- 3";
+        PeriodicHold = false;
+        ProcId = 0;
+        Environment = "";
+        DAGNodeName = "260+260";
+        MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 );
+        TerminationPending = true;
+        NumRestarts = 0;
+        NumSystemHolds = 0;
+        CommittedTime = 18903;
+        MachineAttrCpus0 = 1;
+        WantRemoteSyscalls = false;
+        MyType = "Job";
+        CumulativeSuspensionTime = 0;
+        Rank = 0.0;
+        StartdPrincipal = "execute-side@matchsession/128.105.245.164";
+        Err = "process.err";
+        PeriodicRemove = false;
+        BlockWriteKbytes = 0;
+        ExitBySignal = false;
+        DAGManJobId = 49582200;
+        EnteredCurrentStatus = 1446134017;
+        JOBGLIDEIN_ResourceName = "$$([IfThenElse(IsUndefined(TARGET.GLIDEIN_ResourceName), IfThenElse(IsUndefined(TARGET.GLIDEIN_Site), \"wisc.edu\", TARGET.GLIDEIN_Site), TARGET.GLIDEIN_ResourceName)])";
+        RecentBlockWriteKbytes = 0;
+        TransferIn = false;
+        ExitStatus = 0;
+        ShouldTransferFiles = "YES";
+        IsCHTCSubmit = true;
+        NumJobMatches = 1;
+        RootDir = "/";
+        JobStartDate = 1446115114;
+        JobPrio = 0;
+        CurrentHosts = 0;
+        GlobalJobId = "submit-3.chtc.wisc.edu#49582724.0#1446105803";
+        RemoteSysCpu = 1.090000000000000E+02;
+        TotalSuspensions = 0;
+        CommittedSlotTime = 1.890300000000000E+04;
+        WantCheckpoint = false;
+        BlockReads = 0;
+        LastRe

<TRUNCATED>


[22/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java b/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
deleted file mode 100644
index 6a036c0..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/file/FeedOperations.java
+++ /dev/null
@@ -1,254 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.file;
-
-import java.util.Collection;
-import java.util.List;
-
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.external.api.IAdapterFactory;
-import org.apache.asterix.external.feed.api.IFeedJoint;
-import org.apache.asterix.external.feed.api.IFeedMessage;
-import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
-import org.apache.asterix.external.feed.management.FeedConnectionId;
-import org.apache.asterix.external.feed.management.FeedId;
-import org.apache.asterix.external.feed.message.EndFeedMessage;
-import org.apache.asterix.external.feed.message.FeedTupleCommitResponseMessage;
-import org.apache.asterix.external.feed.message.PrepareStallMessage;
-import org.apache.asterix.external.feed.message.TerminateDataFlowMessage;
-import org.apache.asterix.external.feed.message.ThrottlingEnabledFeedMessage;
-import org.apache.asterix.external.feed.policy.FeedPolicyAccessor;
-import org.apache.asterix.external.feed.watch.FeedConnectJobInfo;
-import org.apache.asterix.external.operators.FeedMessageOperatorDescriptor;
-import org.apache.asterix.external.util.FeedConstants;
-import org.apache.asterix.feed.FeedLifecycleListener;
-import org.apache.asterix.metadata.declared.AqlMetadataProvider;
-import org.apache.asterix.metadata.entities.Feed;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
-import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
-import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
-import org.apache.hyracks.algebricks.common.utils.Pair;
-import org.apache.hyracks.algebricks.common.utils.Triple;
-import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
-import org.apache.hyracks.api.job.JobSpecification;
-import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
-import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;
-
-/**
- * Provides helper method(s) for creating JobSpec for operations on a feed.
- */
-public class FeedOperations {
-
-    /**
-     * Builds the job spec for ingesting a (primary) feed from its external source via the feed adaptor.
-     * @param primaryFeed
-     * @param metadataProvider
-     * @return JobSpecification the Hyracks job specification for receiving data from external source
-     * @throws Exception
-     */
-    public static Pair<JobSpecification, IAdapterFactory> buildFeedIntakeJobSpec(Feed primaryFeed,
-            AqlMetadataProvider metadataProvider, FeedPolicyAccessor policyAccessor) throws Exception {
-
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        spec.setFrameSize(FeedConstants.JobConstants.DEFAULT_FRAME_SIZE);
-        IAdapterFactory adapterFactory = null;
-        IOperatorDescriptor feedIngestor;
-        AlgebricksPartitionConstraint ingesterPc;
-
-        try {
-            Triple<IOperatorDescriptor, AlgebricksPartitionConstraint, IAdapterFactory> t = metadataProvider
-                    .buildFeedIntakeRuntime(spec, primaryFeed, policyAccessor);
-            feedIngestor = t.first;
-            ingesterPc = t.second;
-            adapterFactory = t.third;
-        } catch (AlgebricksException e) {
-            e.printStackTrace();
-            throw new AsterixException(e);
-        }
-
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedIngestor, ingesterPc);
-
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, ingesterPc);
-        spec.connect(new OneToOneConnectorDescriptor(spec), feedIngestor, 0, nullSink, 0);
-        spec.addRoot(nullSink);
-        return new Pair<JobSpecification, IAdapterFactory>(spec, adapterFactory);
-    }
-
-    public static JobSpecification buildDiscontinueFeedSourceSpec(AqlMetadataProvider metadataProvider, FeedId feedId)
-            throws AsterixException, AlgebricksException {
-
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IOperatorDescriptor feedMessenger = null;
-        AlgebricksPartitionConstraint messengerPc = null;
-
-        List<String> locations = FeedLifecycleListener.INSTANCE.getIntakeLocations(feedId);
-        Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDiscontinueFeedMessengerRuntime(spec, feedId,
-                locations);
-
-        feedMessenger = p.first;
-        messengerPc = p.second;
-
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
-        spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
-        spec.addRoot(nullSink);
-
-        return spec;
-    }
-
-    /**
-     * Builds the job spec for sending a message to an active feed to disconnect it from
-     * its source.
-     */
-    public static Pair<JobSpecification, Boolean> buildDisconnectFeedJobSpec(AqlMetadataProvider metadataProvider,
-            FeedConnectionId connectionId) throws AsterixException, AlgebricksException {
-
-        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
-        IOperatorDescriptor feedMessenger;
-        AlgebricksPartitionConstraint messengerPc;
-        List<String> locations = null;
-        FeedRuntimeType sourceRuntimeType;
-        try {
-            FeedConnectJobInfo cInfo = FeedLifecycleListener.INSTANCE.getFeedConnectJobInfo(connectionId);
-            IFeedJoint sourceFeedJoint = cInfo.getSourceFeedJoint();
-            IFeedJoint computeFeedJoint = cInfo.getComputeFeedJoint();
-
-            boolean terminateIntakeJob = false;
-            boolean completeDisconnect = computeFeedJoint == null || computeFeedJoint.getReceivers().isEmpty();
-            if (completeDisconnect) {
-                sourceRuntimeType = FeedRuntimeType.INTAKE;
-                locations = cInfo.getCollectLocations();
-                terminateIntakeJob = sourceFeedJoint.getReceivers().size() == 1;
-            } else {
-                locations = cInfo.getComputeLocations();
-                sourceRuntimeType = FeedRuntimeType.COMPUTE;
-            }
-
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = buildDisconnectFeedMessengerRuntime(spec,
-                    connectionId, locations, sourceRuntimeType, completeDisconnect, sourceFeedJoint.getOwnerFeedId());
-
-            feedMessenger = p.first;
-            messengerPc = p.second;
-
-            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, feedMessenger, messengerPc);
-            NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
-            AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, messengerPc);
-            spec.connect(new OneToOneConnectorDescriptor(spec), feedMessenger, 0, nullSink, 0);
-            spec.addRoot(nullSink);
-            return new Pair<JobSpecification, Boolean>(spec, terminateIntakeJob);
-
-        } catch (AlgebricksException e) {
-            throw new AsterixException(e);
-        }
-
-    }
-
-    public static JobSpecification buildPrepareStallMessageJob(PrepareStallMessage stallMessage,
-            Collection<String> collectLocations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, stallMessage.getConnectionId(), stallMessage, collectLocations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static JobSpecification buildNotifyThrottlingEnabledMessageJob(
-            ThrottlingEnabledFeedMessage throttlingEnabledMesg, Collection<String> locations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, throttlingEnabledMesg.getConnectionId(), throttlingEnabledMesg, locations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static JobSpecification buildTerminateFlowMessageJob(TerminateDataFlowMessage terminateMessage,
-            List<String> collectLocations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, terminateMessage.getConnectionId(), terminateMessage, collectLocations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static JobSpecification buildCommitAckResponseJob(FeedTupleCommitResponseMessage commitResponseMessage,
-            Collection<String> targetLocations) throws AsterixException {
-        JobSpecification messageJobSpec = JobSpecificationUtils.createJobSpecification();
-        try {
-            Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> p = FeedOperations.buildSendFeedMessageRuntime(
-                    messageJobSpec, commitResponseMessage.getConnectionId(), commitResponseMessage, targetLocations);
-            buildSendFeedMessageJobSpec(p.first, p.second, messageJobSpec);
-        } catch (AlgebricksException ae) {
-            throw new AsterixException(ae);
-        }
-        return messageJobSpec;
-    }
-
-    public static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDiscontinueFeedMessengerRuntime(
-            JobSpecification jobSpec, FeedId feedId, List<String> locations) throws AlgebricksException {
-        FeedConnectionId feedConnectionId = new FeedConnectionId(feedId, null);
-        IFeedMessage feedMessage = new EndFeedMessage(feedConnectionId, FeedRuntimeType.INTAKE,
-                feedConnectionId.getFeedId(), true, EndFeedMessage.EndMessageType.DISCONTINUE_SOURCE);
-        return buildSendFeedMessageRuntime(jobSpec, feedConnectionId, feedMessage, locations);
-    }
-
-    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildSendFeedMessageRuntime(
-            JobSpecification jobSpec, FeedConnectionId feedConenctionId, IFeedMessage feedMessage,
-            Collection<String> locations) throws AlgebricksException {
-        AlgebricksPartitionConstraint partitionConstraint = new AlgebricksAbsolutePartitionConstraint(
-                locations.toArray(new String[] {}));
-        FeedMessageOperatorDescriptor feedMessenger = new FeedMessageOperatorDescriptor(jobSpec, feedConenctionId,
-                feedMessage);
-        return new Pair<IOperatorDescriptor, AlgebricksPartitionConstraint>(feedMessenger, partitionConstraint);
-    }
-
-    private static JobSpecification buildSendFeedMessageJobSpec(IOperatorDescriptor operatorDescriptor,
-            AlgebricksPartitionConstraint messengerPc, JobSpecification messageJobSpec) {
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, operatorDescriptor,
-                messengerPc);
-        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(messageJobSpec);
-        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(messageJobSpec, nullSink, messengerPc);
-        messageJobSpec.connect(new OneToOneConnectorDescriptor(messageJobSpec), operatorDescriptor, 0, nullSink, 0);
-        messageJobSpec.addRoot(nullSink);
-        return messageJobSpec;
-    }
-
-    private static Pair<IOperatorDescriptor, AlgebricksPartitionConstraint> buildDisconnectFeedMessengerRuntime(
-            JobSpecification jobSpec, FeedConnectionId feedConenctionId, List<String> locations,
-            FeedRuntimeType sourceFeedRuntimeType, boolean completeDisconnection, FeedId sourceFeedId)
-                    throws AlgebricksException {
-        IFeedMessage feedMessage = new EndFeedMessage(feedConenctionId, sourceFeedRuntimeType, sourceFeedId,
-                completeDisconnection, EndFeedMessage.EndMessageType.DISCONNECT_FEED);
-        return buildSendFeedMessageRuntime(jobSpec, feedConenctionId, feedMessage, locations);
-    }
-}
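
For readers tracing the refactoring: every helper in the file deleted above assembles the
same small Hyracks pipeline -- a single feed/message operator pinned to a set of node
locations, wired through a one-to-one connector into a null sink that serves as the job
root. The sketch below is purely illustrative and is not part of this commit; it reuses
only classes and calls that appear in the deleted file, except that it assumes a plain
"new JobSpecification()" where the deleted code used the removed
JobSpecificationUtils.createJobSpecification() helper.

import java.util.List;

import org.apache.asterix.external.feed.api.IFeedMessage;
import org.apache.asterix.external.feed.management.FeedConnectionId;
import org.apache.asterix.external.operators.FeedMessageOperatorDescriptor;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartitionConstraint;
import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
import org.apache.hyracks.api.job.JobSpecification;
import org.apache.hyracks.dataflow.std.connectors.OneToOneConnectorDescriptor;
import org.apache.hyracks.dataflow.std.misc.NullSinkOperatorDescriptor;

public class FeedMessageJobSketch {
    // Builds a job that delivers one control message to a feed on the given locations,
    // following the messenger-plus-null-sink pattern of the removed FeedOperations helpers.
    public static JobSpecification build(FeedConnectionId connectionId, IFeedMessage message,
            List<String> locations) {
        JobSpecification spec = new JobSpecification();
        FeedMessageOperatorDescriptor messenger =
                new FeedMessageOperatorDescriptor(spec, connectionId, message);
        AlgebricksAbsolutePartitionConstraint constraint =
                new AlgebricksAbsolutePartitionConstraint(locations.toArray(new String[0]));
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, messenger, constraint);
        // A null sink terminates the pipeline so the specification has a root operator.
        NullSinkOperatorDescriptor nullSink = new NullSinkOperatorDescriptor(spec);
        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, nullSink, constraint);
        spec.connect(new OneToOneConnectorDescriptor(spec), messenger, 0, nullSink, 0);
        spec.addRoot(nullSink);
        return spec;
    }
}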

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
index 54cac09..24df771 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryIndexOperationsHelper.java
@@ -24,6 +24,7 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.asterix.app.external.ExternalIndexingOperations;
 import org.apache.asterix.common.config.AsterixStorageProperties;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
index 83f91a0..be1c356 100644
--- a/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
+++ b/asterix-app/src/main/java/org/apache/asterix/file/SecondaryRTreeOperationsHelper.java
@@ -20,6 +20,7 @@ package org.apache.asterix.file;
 
 import java.util.List;
 
+import org.apache.asterix.app.external.ExternalIndexingOperations;
 import org.apache.asterix.common.api.ILocalResourceMetadata;
 import org.apache.asterix.common.config.AsterixStorageProperties;
 import org.apache.asterix.common.config.DatasetConfig.DatasetType;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
index adf0a4d..e683ef4 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/CCApplicationEntryPoint.java
@@ -34,6 +34,9 @@ import org.apache.asterix.api.http.servlet.QueryStatusAPIServlet;
 import org.apache.asterix.api.http.servlet.ShutdownAPIServlet;
 import org.apache.asterix.api.http.servlet.UpdateAPIServlet;
 import org.apache.asterix.api.http.servlet.VersionAPIServlet;
+import org.apache.asterix.app.external.CentralFeedManager;
+import org.apache.asterix.app.external.ExternalLibraryUtils;
+import org.apache.asterix.app.external.FeedLifecycleListener;
 import org.apache.asterix.common.api.AsterixThreadFactory;
 import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
 import org.apache.asterix.common.config.AsterixExternalProperties;
@@ -43,8 +46,6 @@ import org.apache.asterix.compiler.provider.AqlCompilationProvider;
 import org.apache.asterix.compiler.provider.SqlppCompilationProvider;
 import org.apache.asterix.event.service.ILookupService;
 import org.apache.asterix.external.feed.api.ICentralFeedManager;
-import org.apache.asterix.feed.CentralFeedManager;
-import org.apache.asterix.feed.FeedLifecycleListener;
 import org.apache.asterix.messaging.CCMessageBroker;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.api.IAsterixStateProxy;
@@ -110,7 +111,7 @@ public class CCApplicationEntryPoint implements ICCApplicationEntryPoint {
         setupFeedServer(externalProperties);
         feedServer.start();
 
-        ExternalLibraryBootstrap.setUpExternaLibraries(false);
+        ExternalLibraryUtils.setUpExternaLibraries(false);
         centralFeedManager = CentralFeedManager.getInstance();
         centralFeedManager.start();
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
deleted file mode 100755
index b0dfd58..0000000
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/ExternalLibraryBootstrap.java
+++ /dev/null
@@ -1,325 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements.  See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership.  The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License.  You may obtain a copy of the License at
- *
- *   http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-package org.apache.asterix.hyracks.bootstrap;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.net.URL;
-import java.net.URLClassLoader;
-import java.rmi.RemoteException;
-import java.util.ArrayList;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.logging.Level;
-import java.util.logging.Logger;
-
-import javax.xml.bind.JAXBContext;
-import javax.xml.bind.Unmarshaller;
-
-import org.apache.asterix.common.exceptions.ACIDException;
-import org.apache.asterix.common.exceptions.AsterixException;
-import org.apache.asterix.common.functions.FunctionSignature;
-import org.apache.asterix.external.api.IDataSourceAdapter;
-import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
-import org.apache.asterix.external.library.ExternalLibrary;
-import org.apache.asterix.external.library.ExternalLibraryManager;
-import org.apache.asterix.external.library.LibraryAdapter;
-import org.apache.asterix.external.library.LibraryFunction;
-import org.apache.asterix.metadata.MetadataManager;
-import org.apache.asterix.metadata.MetadataTransactionContext;
-import org.apache.asterix.metadata.api.IMetadataEntity;
-import org.apache.asterix.metadata.entities.DatasourceAdapter;
-import org.apache.asterix.metadata.entities.Dataverse;
-import org.apache.asterix.metadata.entities.Library;
-import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
-
-public class ExternalLibraryBootstrap {
-
-    private static Logger LOGGER = Logger.getLogger(ExternalLibraryBootstrap.class.getName());
-
-    public static void setUpExternaLibraries(boolean isMetadataNode) throws Exception {
-
-        Map<String, List<String>> uninstalledLibs = null;
-        if (isMetadataNode) {
-            uninstalledLibs = uninstallLibraries();
-        }
-
-        File installLibDir = getLibraryInstallDir();
-        if (installLibDir.exists()) {
-            for (String dataverse : installLibDir.list()) {
-                File dataverseDir = new File(installLibDir, dataverse);
-                String[] libraries = dataverseDir.list();
-                for (String library : libraries) {
-                    registerLibrary(dataverse, library, isMetadataNode, installLibDir);
-                    if (isMetadataNode) {
-                        File libraryDir = new File(installLibDir.getAbsolutePath() + File.separator + dataverse
-                                + File.separator + library);
-                        installLibraryIfNeeded(dataverse, libraryDir, uninstalledLibs);
-                    }
-                }
-            }
-        }
-    }
-
-    private static Map<String, List<String>> uninstallLibraries() throws Exception {
-        Map<String, List<String>> uninstalledLibs = new HashMap<String, List<String>>();
-        File uninstallLibDir = getLibraryUninstallDir();
-        String[] uninstallLibNames;
-        if (uninstallLibDir.exists()) {
-            uninstallLibNames = uninstallLibDir.list();
-            for (String uninstallLibName : uninstallLibNames) {
-                String[] components = uninstallLibName.split("\\.");
-                String dataverse = components[0];
-                String libName = components[1];
-                uninstallLibrary(dataverse, libName);
-                new File(uninstallLibDir, uninstallLibName).delete();
-                List<String> uinstalledLibsInDv = uninstalledLibs.get(dataverse);
-                if (uinstalledLibsInDv == null) {
-                    uinstalledLibsInDv = new ArrayList<String>();
-                    uninstalledLibs.put(dataverse, uinstalledLibsInDv);
-                }
-                uinstalledLibsInDv.add(libName);
-            }
-        }
-        return uninstalledLibs;
-    }
-
-    private static boolean uninstallLibrary(String dataverse, String libraryName)
-            throws AsterixException, RemoteException, ACIDException {
-        MetadataTransactionContext mdTxnCtx = null;
-        try {
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
-            if (dv == null) {
-                return false;
-            }
-
-            org.apache.asterix.metadata.entities.Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx,
-                    dataverse, libraryName);
-            if (library == null) {
-                return false;
-            }
-
-            List<org.apache.asterix.metadata.entities.Function> functions = MetadataManager.INSTANCE
-                    .getDataverseFunctions(mdTxnCtx, dataverse);
-            for (org.apache.asterix.metadata.entities.Function function : functions) {
-                if (function.getName().startsWith(libraryName + "#")) {
-                    MetadataManager.INSTANCE.dropFunction(mdTxnCtx,
-                            new FunctionSignature(dataverse, function.getName(), function.getArity()));
-                }
-            }
-
-            List<org.apache.asterix.metadata.entities.DatasourceAdapter> adapters = MetadataManager.INSTANCE
-                    .getDataverseAdapters(mdTxnCtx, dataverse);
-            for (org.apache.asterix.metadata.entities.DatasourceAdapter adapter : adapters) {
-                if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
-                    MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
-                }
-            }
-
-            MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-            throw new AsterixException(e);
-        }
-        return true;
-    }
-
-    // Each element of a library is installed as part of a transaction. Any
-    // failure in installing an element does not affect installation of other
-    // libraries.
-    private static void installLibraryIfNeeded(String dataverse, final File libraryDir,
-            Map<String, List<String>> uninstalledLibs) throws Exception {
-
-        String libraryName = libraryDir.getName().trim();
-        List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
-        boolean wasUninstalled = uninstalledLibsInDv != null && uninstalledLibsInDv.contains(libraryName);
-
-        MetadataTransactionContext mdTxnCtx = null;
-        MetadataManager.INSTANCE.acquireWriteLatch();
-        try {
-            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
-            org.apache.asterix.metadata.entities.Library libraryInMetadata = MetadataManager.INSTANCE
-                    .getLibrary(mdTxnCtx, dataverse, libraryName);
-            if (libraryInMetadata != null && !wasUninstalled) {
-                return;
-            }
-
-            String[] libraryDescriptors = libraryDir.list(new FilenameFilter() {
-                @Override
-                public boolean accept(File dir, String name) {
-                    return name.endsWith(".xml");
-                }
-            });
-
-            if (libraryDescriptors.length == 0) {
-                throw new Exception("No library descriptor defined");
-            } else if (libraryDescriptors.length > 1) {
-                throw new Exception("More than one library descriptor defined");
-            }
-
-            ExternalLibrary library = getLibrary(new File(libraryDir + File.separator + libraryDescriptors[0]));
-
-            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
-            if (dv == null) {
-                MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverse,
-                        NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, IMetadataEntity.PENDING_NO_OP));
-            }
-            if (library.getLibraryFunctions() != null) {
-                for (LibraryFunction function : library.getLibraryFunctions().getLibraryFunction()) {
-                    String[] fargs = function.getArguments().trim().split(",");
-                    List<String> args = new ArrayList<String>();
-                    for (String arg : fargs) {
-                        args.add(arg);
-                    }
-                    org.apache.asterix.metadata.entities.Function f = new org.apache.asterix.metadata.entities.Function(
-                            dataverse, libraryName + "#" + function.getName().trim(), args.size(), args,
-                            function.getReturnType().trim(), function.getDefinition().trim(),
-                            library.getLanguage().trim(), function.getFunctionType().trim());
-                    MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Installed function: " + libraryName + "#" + function.getName().trim());
-                    }
-                }
-            }
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Installed functions contain in library :" + libraryName);
-            }
-
-            if (library.getLibraryAdapters() != null) {
-                for (LibraryAdapter adapter : library.getLibraryAdapters().getLibraryAdapter()) {
-                    String adapterFactoryClass = adapter.getFactoryClass().trim();
-                    String adapterName = libraryName + "#" + adapter.getName().trim();
-                    AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
-                    DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass,
-                            IDataSourceAdapter.AdapterType.EXTERNAL);
-                    MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
-                    if (LOGGER.isLoggable(Level.INFO)) {
-                        LOGGER.info("Installed adapter: " + adapterName);
-                    }
-                }
-            }
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Installed adapters contain in library :" + libraryName);
-            }
-
-            MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new Library(dataverse, libraryName));
-
-            if (LOGGER.isLoggable(Level.INFO)) {
-                LOGGER.info("Added library " + libraryName + "to Metadata");
-            }
-
-            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
-        } catch (Exception e) {
-            e.printStackTrace();
-            if (LOGGER.isLoggable(Level.SEVERE)) {
-                LOGGER.info("Exception in installing library " + libraryName);
-            }
-            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
-        } finally {
-            MetadataManager.INSTANCE.releaseWriteLatch();
-        }
-    }
-
-    private static void registerLibrary(String dataverse, String libraryName, boolean isMetadataNode,
-            File installLibDir) throws Exception {
-        ClassLoader classLoader = getLibraryClassLoader(dataverse, libraryName);
-        ExternalLibraryManager.registerLibraryClassLoader(dataverse, libraryName, classLoader);
-    }
-
-    private static ExternalLibrary getLibrary(File libraryXMLPath) throws Exception {
-        JAXBContext configCtx = JAXBContext.newInstance(ExternalLibrary.class);
-        Unmarshaller unmarshaller = configCtx.createUnmarshaller();
-        ExternalLibrary library = (ExternalLibrary) unmarshaller.unmarshal(libraryXMLPath);
-        return library;
-    }
-
-    private static ClassLoader getLibraryClassLoader(String dataverse, String libraryName) throws Exception {
-
-        File installDir = getLibraryInstallDir();
-        if (LOGGER.isLoggable(Level.INFO)) {
-            LOGGER.info("Installing lirbary " + libraryName + " in dataverse " + dataverse + "."
-                    + " Install Directory: " + installDir.getAbsolutePath());
-        }
-
-        File libDir = new File(
-                installDir.getAbsolutePath() + File.separator + dataverse + File.separator + libraryName);
-        FilenameFilter jarFileFilter = new FilenameFilter() {
-            @Override
-            public boolean accept(File dir, String name) {
-                return name.endsWith(".jar");
-            }
-        };
-
-        String[] jarsInLibDir = libDir.list(jarFileFilter);
-        if (jarsInLibDir.length > 1) {
-            throw new Exception("Incorrect library structure: found multiple library jars");
-        }
-        if (jarsInLibDir.length == 0) {
-            throw new Exception("Incorrect library structure: could not find library jar");
-        }
-
-        File libJar = new File(libDir, jarsInLibDir[0]);
-        File libDependencyDir = new File(libDir.getAbsolutePath() + File.separator + "lib");
-        int numDependencies = 1;
-        String[] libraryDependencies = null;
-        if (libDependencyDir.exists()) {
-            libraryDependencies = libDependencyDir.list(jarFileFilter);
-            numDependencies += libraryDependencies.length;
-        }
-
-        ClassLoader parentClassLoader = ExternalLibraryBootstrap.class.getClassLoader();
-        URL[] urls = new URL[numDependencies];
-        int count = 0;
-        urls[count++] = libJar.toURI().toURL();
-
-        if (libraryDependencies != null && libraryDependencies.length > 0) {
-            for (String dependency : libraryDependencies) {
-                File file = new File(libDependencyDir + File.separator + dependency);
-                urls[count++] = file.toURI().toURL();
-            }
-        }
-
-        if (LOGGER.isLoggable(Level.INFO)) {
-            StringBuilder logMesg = new StringBuilder("Classpath for library " + libraryName + "\n");
-            for (URL url : urls) {
-                logMesg.append(url.getFile() + "\n");
-            }
-            LOGGER.info(logMesg.toString());
-        }
-
-        ClassLoader classLoader = new URLClassLoader(urls, parentClassLoader);
-        return classLoader;
-    }
-
-    private static File getLibraryInstallDir() {
-        String workingDir = System.getProperty("user.dir");
-        return new File(workingDir + File.separator + "library");
-    }
-
-    private static File getLibraryUninstallDir() {
-        String workingDir = System.getProperty("user.dir");
-        return new File(workingDir + File.separator + "uninstall");
-    }
-
-}
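For readers tracking the library-loading logic that moves out of this deleted bootstrap class, the sketch below isolates the class-loader construction. It is an illustrative, self-contained rewrite using only the standard JDK, assuming the on-disk layout used above: one library jar directly under <installDir>/<dataverse>/<libraryName> plus an optional lib/ directory of dependency jars. The class and method names here are hypothetical and are not AsterixDB API.

import java.io.File;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.List;

public final class LibraryClassLoaderSketch {

    // Builds a class loader for a library laid out as
    //   <installDir>/<dataverse>/<libraryName>/<library>.jar
    //   <installDir>/<dataverse>/<libraryName>/lib/*.jar   (optional dependencies)
    static ClassLoader buildLibraryClassLoader(File installDir, String dataverse, String libraryName)
            throws Exception {
        File libDir = new File(installDir, dataverse + File.separator + libraryName);
        File[] jars = libDir.listFiles((dir, name) -> name.endsWith(".jar"));
        if (jars == null || jars.length == 0) {
            throw new Exception("Incorrect library structure: could not find library jar");
        }
        if (jars.length > 1) {
            throw new Exception("Incorrect library structure: found multiple library jars");
        }
        List<URL> urls = new ArrayList<>();
        urls.add(jars[0].toURI().toURL());
        // optional lib/ directory holding dependency jars
        File depDir = new File(libDir, "lib");
        File[] deps = depDir.isDirectory() ? depDir.listFiles((dir, name) -> name.endsWith(".jar")) : null;
        if (deps != null) {
            for (File dep : deps) {
                urls.add(dep.toURI().toURL());
            }
        }
        ClassLoader parent = LibraryClassLoaderSketch.class.getClassLoader();
        return new URLClassLoader(urls.toArray(new URL[0]), parent);
    }
}

Collecting the URLs in a list rather than sizing an array up front keeps the same single-jar-plus-dependencies contract as the deleted code while avoiding the array-length bookkeeping.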

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
index d5f1a51..a6be075 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/FeedBootstrap.java
@@ -18,9 +18,9 @@
  */
 package org.apache.asterix.hyracks.bootstrap;
 
+import org.apache.asterix.app.external.CentralFeedManager;
 import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.external.util.FeedConstants;
-import org.apache.asterix.feed.CentralFeedManager;
 import org.apache.asterix.om.types.BuiltinType;
 import org.apache.asterix.om.types.IAType;
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
index 2bac1cf..8132d4b 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/GlobalRecoveryManager.java
@@ -23,6 +23,8 @@ import java.util.Set;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.asterix.app.external.CentralFeedManager;
+import org.apache.asterix.app.external.ExternalIndexingOperations;
 import org.apache.asterix.common.api.IClusterManagementWork;
 import org.apache.asterix.common.api.IClusterManagementWork.ClusterState;
 import org.apache.asterix.common.api.IClusterManagementWorkResponse;
@@ -32,8 +34,6 @@ import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransaction
 import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
 import org.apache.asterix.common.config.MetadataConstants;
 import org.apache.asterix.external.indexing.ExternalFile;
-import org.apache.asterix.feed.CentralFeedManager;
-import org.apache.asterix.file.ExternalIndexingOperations;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
index fcb196d..4922ae6 100644
--- a/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
+++ b/asterix-app/src/main/java/org/apache/asterix/hyracks/bootstrap/NCApplicationEntryPoint.java
@@ -26,6 +26,7 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import org.apache.asterix.api.common.AsterixAppRuntimeContext;
+import org.apache.asterix.app.external.ExternalLibraryUtils;
 import org.apache.asterix.common.api.AsterixThreadFactory;
 import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
 import org.apache.asterix.common.config.AsterixMetadataProperties;
@@ -212,7 +213,7 @@ public class NCApplicationEntryPoint implements INCApplicationEntryPoint {
         if (isMetadataNode && !pendingFailbackCompletion) {
             runtimeContext.initializeMetadata(systemState == SystemState.NEW_UNIVERSE);
         }
-        ExternalLibraryBootstrap.setUpExternaLibraries(isMetadataNode && !pendingFailbackCompletion);
+        ExternalLibraryUtils.setUpExternaLibraries(isMetadataNode && !pendingFailbackCompletion);
 
         if (LOGGER.isLoggable(Level.INFO)) {
             LOGGER.info("Starting lifecycle components");

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
index 1625c2b..023de30 100644
--- a/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
+++ b/asterix-app/src/test/java/org/apache/asterix/api/http/servlet/ConnectorAPIServletTest.java
@@ -36,7 +36,7 @@ import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.asterix.feed.CentralFeedManager;
+import org.apache.asterix.app.external.CentralFeedManager;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java b/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java
new file mode 100644
index 0000000..4f8fa6f
--- /dev/null
+++ b/asterix-app/src/test/java/org/apache/asterix/app/external/TestLibrarian.java
@@ -0,0 +1,76 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.io.File;
+import java.io.IOException;
+import java.rmi.RemoteException;
+import java.util.HashMap;
+
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.event.service.AsterixEventServiceUtil;
+import org.apache.asterix.test.aql.ITestLibrarian;
+import org.apache.commons.io.FileUtils;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class TestLibrarian implements ITestLibrarian {
+
+    public static final String LIBRARY_DIR_NAME = "library";
+
+    @Override
+    public void install(String dvName, String libName, String libPath) throws Exception {
+        // get the directory of the to be installed libraries
+        File installLibDir = ExternalLibraryUtils.getLibraryInstallDir();
+        // directory exists?
+        if (!installLibDir.exists()) {
+            installLibDir.mkdir();
+        }
+        // copy the library file into the directory
+        File destinationDir = new File(
+                installLibDir.getAbsolutePath() + File.separator + dvName + File.separator + libName);
+        FileUtils.deleteQuietly(destinationDir);
+        destinationDir.mkdirs();
+        try {
+            AsterixEventServiceUtil.unzip(libPath, destinationDir.getAbsolutePath());
+        } catch (Exception e) {
+
+            throw new Exception("Couldn't unzip the file: " + libPath, e);
+        }
+        // for each file (library), register library
+        ExternalLibraryUtils.registerLibrary(dvName, libName, true, destinationDir);
+        // get library file
+        // install if needed (add functions, adapters, datasources, parsers to the metadata)
+        // <Not required for use>
+        ExternalLibraryUtils.installLibraryIfNeeded(dvName, destinationDir, new HashMap<>());
+    }
+
+    @Override
+    public void uninstall(String dvName, String libName) throws RemoteException, AsterixException, ACIDException {
+        ExternalLibraryUtils.uninstallLibrary(dvName, libName);
+    }
+
+    public static void removeLibraryDir() throws IOException {
+        File installLibDir = ExternalLibraryUtils.getLibraryInstallDir();
+        if (!installLibDir.getAbsolutePath().endsWith(LIBRARY_DIR_NAME)) {
+            throw new HyracksDataException("Invalid library directory");
+        }
+        FileUtils.deleteQuietly(installLibDir);
+    }
+}
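As a rough usage sketch (assuming TestLibrarian is on the test classpath and a metadata node is running), the class can be driven directly the same way TestExecutor drives it from the .lib.aql scripts further down: install the zipped test library into a dataverse, run whatever references testlib#..., then uninstall and clean the library directory. The driver class name is illustrative only.

import org.apache.asterix.app.external.TestLibrarian;

public class TestLibrarianUsageSketch {
    public static void main(String[] args) throws Exception {
        TestLibrarian librarian = new TestLibrarian();
        // same dataverse, library name, and zip path as the .lib.aql test scripts below
        librarian.install("externallibtest", "testlib",
                "src/test/resources/externallib/testlib-zip-binary-assembly.zip");
        try {
            // ... execute AQL that uses testlib#<function> or testlib#<adapter> ...
        } finally {
            librarian.uninstall("externallibtest", "testlib");
            TestLibrarian.removeLibraryDir();
        }
    }
}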

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
index 21fc240..976ca70 100644
--- a/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/ExecutionTest.java
@@ -23,6 +23,7 @@ import java.util.ArrayList;
 import java.util.Collection;
 import java.util.logging.Logger;
 
+import org.apache.asterix.app.external.TestLibrarian;
 import org.apache.asterix.common.config.AsterixTransactionProperties;
 import org.apache.asterix.test.aql.TestExecutor;
 import org.apache.asterix.testframework.context.TestCaseContext;
@@ -57,6 +58,9 @@ public class ExecutionTest {
         try {
             File outdir = new File(PATH_ACTUAL);
             outdir.mkdirs();
+            // remove library directory
+            TestLibrarian.removeLibraryDir();
+            testExecutor.setLibrarian(new TestLibrarian());
             ExecutionTestUtil.setUp();
         } catch (Throwable th) {
             th.printStackTrace();
@@ -66,6 +70,8 @@ public class ExecutionTest {
 
     @AfterClass
     public static void tearDown() throws Exception {
+        // remove library directory
+        TestLibrarian.removeLibraryDir();
         ExecutionTestUtil.tearDown();
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java b/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
index 860bac3..b577d29 100644
--- a/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
+++ b/asterix-app/src/test/java/org/apache/asterix/test/runtime/RepeatedTest.java
@@ -23,6 +23,10 @@ import java.lang.annotation.RetentionPolicy;
 import java.lang.annotation.Target;
 import java.util.Collection;
 
+import org.apache.asterix.app.external.TestLibrarian;
+import org.apache.asterix.test.aql.TestExecutor;
+import org.apache.asterix.test.runtime.RepeatRule.Repeat;
+import org.apache.asterix.testframework.context.TestCaseContext;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.MethodRule;
@@ -32,10 +36,6 @@ import org.junit.runners.Parameterized.Parameters;
 import org.junit.runners.model.FrameworkMethod;
 import org.junit.runners.model.Statement;
 
-import org.apache.asterix.test.aql.TestExecutor;
-import org.apache.asterix.test.runtime.RepeatRule.Repeat;
-import org.apache.asterix.testframework.context.TestCaseContext;
-
 /**
  * Runs runtime test cases that have been identified in the repeatedtestsuite.xml.
  * Each test is run 10000 times.
@@ -94,14 +94,16 @@ public class RepeatedTest extends ExecutionTest {
 
     public RepeatedTest(TestCaseContext tcCtx) {
         super(tcCtx);
+        testExecutor.setLibrarian(new TestLibrarian());
         count = 0;
     }
 
     @Rule
     public RepeatRule repeatRule = new RepeatRule();
 
+    @Override
     @Test
-    @Repeat(times = 10000)
+    @Repeat(times = 100)
     public void test() throws Exception {
         System.err.println("***** Test Count: " + (++count) + " ******");
         testExecutor.executeTest(PATH_ACTUAL, tcCtx, null, false);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.1.ddl.aql
new file mode 100644
index 0000000..21c8ac6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.1.ddl.aql
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create an adapter that uses external parser to parse data from files
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+
+drop dataverse externallibtest if exists;
+create dataverse externallibtest;
+use dataverse externallibtest;
+
+create type Classad as open {
+GlobalJobId: string
+};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.2.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.2.lib.aql
new file mode 100644
index 0000000..0290611
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.2.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql
new file mode 100644
index 0000000..9d93457
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.3.ddl.aql
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use dataverse externallibtest;
+
+create external dataset Condor(Classad) using localfs(
+("path"="asterix_nc1://data/external-parser/jobads.new"),
+("reader"="semi-structured"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
+("reader-stream"="localfs"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.4.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.4.query.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.4.query.aql
new file mode 100644
index 0000000..9d5d499
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.4.query.aql
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse externallibtest;
+
+for $x in dataset Condor
+order by $x.GlobalJobId
+return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.5.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.5.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.5.lib.aql
new file mode 100644
index 0000000..86af80f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser/classad-parser.5.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.1.ddl.aql
new file mode 100644
index 0000000..21c8ac6
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.1.ddl.aql
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create an adapter that uses external parser to parse data from files
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+
+drop dataverse externallibtest if exists;
+create dataverse externallibtest;
+use dataverse externallibtest;
+
+create type Classad as open {
+GlobalJobId: string
+};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.2.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.2.lib.aql
new file mode 100644
index 0000000..0290611
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.2.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql
new file mode 100644
index 0000000..b47ccc3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.3.ddl.aql
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+use dataverse externallibtest;
+
+create external dataset Condor(Classad) using localfs(
+("path"="asterix_nc1://data/external-parser/jobads.old"),
+("reader"="line-separated"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
+("reader-stream"="localfs"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.4.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.4.query.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.4.query.aql
new file mode 100644
index 0000000..9d5d499
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.4.query.aql
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse externallibtest;
+
+for $x in dataset Condor
+order by $x.GlobalJobId
+return $x;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.5.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.5.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.5.lib.aql
new file mode 100644
index 0000000..86af80f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/classad-parser2/classad-parser2.5.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.1.ddl.aql
new file mode 100644
index 0000000..7d11e88
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.1.ddl.aql
@@ -0,0 +1,26 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+drop dataverse externallibtest if exists;
+create dataverse externallibtest;
+use dataverse externallibtest;
+
+create type CountryCapitalType if not exists as closed {
+country: string,
+capital: string
+};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.2.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.2.lib.aql
new file mode 100644
index 0000000..0290611
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.2.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.3.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.3.query.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.3.query.aql
new file mode 100644
index 0000000..863da20
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.3.query.aql
@@ -0,0 +1,23 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+use dataverse externallibtest;
+
+let $input:=["England","Italy","China","United States","India","Jupiter"]
+for $country in $input
+return testlib#getCapital($country)

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.4.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.4.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.4.lib.aql
new file mode 100644
index 0000000..86af80f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/getCapital/getCapital.4.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.1.ddl.aql
new file mode 100644
index 0000000..2873c48
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.1.ddl.aql
@@ -0,0 +1,38 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter.
+                  The feed simulator simulates feed from a file in the local fs.
+                  Associate with the feed an external user-defined function. The UDF
+                  finds topics in each tweet. A topic is identified by a #.
+                  Begin ingestion and apply external user defined function
+ * Expected Res : Success
+ * Date         : 23rd Apr 2013
+ */
+drop dataverse externallibtest if exists;
+create dataverse externallibtest;
+use dataverse externallibtest;
+
+create type TestTypedAdapterOutputType as closed {
+  tweetid: int64,
+  message-text: string
+};
+
+create dataset TweetsTestAdapter(TestTypedAdapterOutputType)
+primary key tweetid;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.2.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.2.lib.aql
new file mode 100644
index 0000000..0290611
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.2.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.3.ddl.aql
new file mode 100644
index 0000000..800cef3
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.3.ddl.aql
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter.
+                  The feed simulator simulates feed from a file in the local fs.
+                  Associate with the feed an external user-defined function. The UDF
+                  finds topics in each tweet. A topic is identified by a #.
+                  Begin ingestion and apply external user defined function
+ * Expected Res : Success
+ * Date         : 23rd Apr 2013
+ */
+
+use dataverse externallibtest;
+create feed TestTypedAdapterFeed
+using "testlib#test_typed_adapter" (("num_output_records"="5"),("type-name"="TestTypedAdapterOutputType"));

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.4.update.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.4.update.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.4.update.aql
new file mode 100644
index 0000000..8dc330b
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.4.update.aql
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter.
+                  The feed simulator simulates feed from a file in the local fs.
+                  Associate with the feed an external user-defined function. The UDF
+                  finds topics in each tweet. A topic is identified by a #.
+                  Begin ingestion and apply external user defined function
+ * Expected Res : Success
+ * Date         : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+set wait-for-completion-feed "true";
+
+connect feed TestTypedAdapterFeed to dataset TweetsTestAdapter;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.5.query.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.5.query.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.5.query.aql
new file mode 100644
index 0000000..2860f17
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.5.query.aql
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed dataset that uses the feed simulator adapter.
+                  The feed simulator simulates feed from a file in the local fs.
+                  Associate with the feed an external user-defined function. The UDF
+                  finds topics in each tweet. A topic is identified by a #.
+                  Begin ingestion and apply external user defined function
+ * Expected Res : Success
+ * Date         : 23rd Apr 2013
+ */
+use dataverse externallibtest;
+
+for $x in dataset TweetsTestAdapter
+order by $x.tweetid
+return $x

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.6.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.6.lib.aql b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.6.lib.aql
new file mode 100644
index 0000000..86af80f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/external-library/typed_adapter/typed_adapter.6.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+uninstall externallibtest testlib
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.1.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.1.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.1.ddl.aql
new file mode 100644
index 0000000..bc6d15f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.1.ddl.aql
@@ -0,0 +1,32 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed which uses an external parser to parse data from files
+ *                The files have duplicates and long records
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+
+drop dataverse externallibtest if exists;
+create dataverse externallibtest;
+use dataverse externallibtest;
+
+create type Classad as open {
+GlobalJobId: string
+};

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.2.lib.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.2.lib.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.2.lib.aql
new file mode 100644
index 0000000..0290611
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.2.lib.aql
@@ -0,0 +1,19 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+install externallibtest testlib src/test/resources/externallib/testlib-zip-binary-assembly.zip
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql
new file mode 100644
index 0000000..8f0756f
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/queries/feeds/feed-with-external-parser/feed-with-external-parser.3.ddl.aql
@@ -0,0 +1,33 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+/*
+ * Description  : Create a feed which uses an external parser to parse data from files
+ *                The files have duplicates and long records
+ * Expected Res : Success
+ * Date         : Feb, 09, 2016
+ */
+use dataverse externallibtest;
+create dataset Condor(Classad) primary key GlobalJobId;
+
+create feed CondorFeed using push_localfs(
+("path"="asterix_nc1://data/external-parser/dropbox/jobads1.txt,asterix_nc1://data/external-parser/dropbox/jobads2.txt"),
+("reader"="semi-structured"),
+("parser"="testlib#org.apache.asterix.external.library.ClassAdParserFactory"),
+("reader-stream"="localfs"),
+("type-name"="Classad"));


[28/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/data/external-parser/testdata.txt
----------------------------------------------------------------------
diff --git a/asterix-app/data/external-parser/testdata.txt b/asterix-app/data/external-parser/testdata.txt
new file mode 100644
index 0000000..db20559
--- /dev/null
+++ b/asterix-app/data/external-parser/testdata.txt
@@ -0,0 +1,684032 @@
+Machine = "glow-c005.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391693
+UpdateSequenceNumber = 1447
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223717.78"
+PublicClaimId = "<128.105.245.5:35840>#1358047163#123#..."
+TotalTimeMatchedIdle = 8
+HasMPI = true
+TotalClaimRunTime = 7109
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c005"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 23
+Name = "slot1@glow-c005.cs.wisc.edu"
+ImageSize = 108228
+NumPids = 1
+MonitorSelfTime = 1358391570
+TimeToLive = 2147483647
+KeyboardIdle = 345726
+LastBenchmark = 1358374272
+TotalDisk = 75664548
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374272
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1445
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095821
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 7279
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 337084
+MonitorSelfImageSize = 9400.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391441
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 94
+TotalMemory = 2025
+DaemonStartTime = 1358047163
+EnteredCurrentActivity = 1358390676
+MyAddress = "<128.105.245.5:35840>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223717.78#1358389443"
+HasJava = true
+EnteredCurrentState = 1358384583
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+MyCurrentTime = 1358391693
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 36
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1261
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x48000000000a051140002aaaaaa81400"
+KFlops = 1089447
+UpdatesSequenced = 1443
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4656
+Arch = "INTEL"
+Mips = 2522
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391441
+HasTDP = true
+ConsoleIdle = 345726
+SubnetMask = "255.255.254.0"
+UpdatesLost = 254
+TotalJobRunTime = 1017
+StartdIpAddr = "<128.105.245.5:35840>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.060000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 7
+HibernationState = "NONE"
+JavaMFlops = 310.359924
+MonitorSelfAge = 344408
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390676
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37832274
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c005.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:ba"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.060000
+
+Machine = "glow-c005.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391694
+UpdateSequenceNumber = 1435
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223679.4"
+PublicClaimId = "<128.105.245.5:35840>#1358047163#125#..."
+TotalTimeMatchedIdle = 2
+HasMPI = true
+TotalClaimRunTime = 7087
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c005"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 10
+Name = "slot2@glow-c005.cs.wisc.edu"
+ImageSize = 115004
+NumPids = 1
+MonitorSelfTime = 1358391570
+TimeToLive = 2147483647
+KeyboardIdle = 345726
+LastBenchmark = 1358374272
+TotalDisk = 75664548
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374272
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1436
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095821
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 8211
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 336211
+MonitorSelfImageSize = 9400.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391429
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 82
+TotalMemory = 2025
+DaemonStartTime = 1358047163
+EnteredCurrentActivity = 1358384607
+MyAddress = "<128.105.245.5:35840>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223679.4#1358384461"
+HasJava = true
+EnteredCurrentState = 1358384601
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+MyCurrentTime = 1358391694
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 9
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1261
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00200000080000020000000000000000"
+KFlops = 1089447
+UpdatesSequenced = 1435
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4656
+Arch = "INTEL"
+Mips = 2522
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391429
+HasTDP = true
+ConsoleIdle = 345726
+SubnetMask = "255.255.254.0"
+UpdatesLost = 25
+TotalJobRunTime = 7087
+StartdIpAddr = "<128.105.245.5:35840>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.060000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 7
+HibernationState = "NONE"
+JavaMFlops = 310.359924
+MonitorSelfAge = 344408
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358384607
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37832274
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c005.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:ba"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.060000
+
+Machine = "glow-c015.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391469
+UpdateSequenceNumber = 1190
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_33"
+OSG_major = 3
+JobId = "1223684.3"
+PublicClaimId = "<128.105.245.15:56069>#1358102086#81#..."
+TotalTimeMatchedIdle = 4
+HasMPI = true
+TotalClaimRunTime = 5157
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c015"
+JavaVendor = "Sun Microsystems Inc."
+HasAFS_OSG = true
+TotalTimePreemptingVacating = 5
+Name = "slot1@glow-c015.cs.wisc.edu"
+ImageSize = 115048
+NumPids = 1
+MonitorSelfTime = 1358391300
+TimeToLive = 2147483647
+KeyboardIdle = 290579
+LastBenchmark = 1358379296
+TotalDisk = 75658512
+OSglibc_minor = 5
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358379296
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1187
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091578
+HasAFS_Atlas = true
+ClockDay = 3
+IsWakeOnLanEnabled = true
+HasCVMFS_CMS = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 6581
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasAFS = true
+AFSCacheUsed = 132
+HasIOProxy = true
+TotalTimeClaimedBusy = 282700
+MonitorSelfImageSize = 9724.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391303
+OSIssue = "Scientific Linux release 5.8 (Boron)"
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 46
+OSG_micro = 8
+TotalMemory = 2025
+DaemonStartTime = 1358102086
+EnteredCurrentActivity = 1358386312
+MyAddress = "<128.105.245.15:56069>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223684.3#1358384463"
+AFS_SYSNAMES = "'i386_linux26'"
+HasJava = true
+EnteredCurrentState = 1358386312
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+OSglibc_major = 2
+MyCurrentTime = 1358391469
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 38
+AFSCacheAvail = 100000
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1257
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+HasCVMFS_Atlas = true
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x55554a80000000355400005555554001"
+KFlops = 1042629
+UpdatesSequenced = 1186
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4708
+AFS_SYSNAME = "i386_linux26"
+Arch = "INTEL"
+Mips = 2435
+Activity = "Busy"
+OSKernelRelease = "2.6.18-308.13.1.el5"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+OSlibc6 = "libc-2.5.so"
+LastFetchWorkCompleted = 1358391304
+HasTDP = true
+ConsoleIdle = 290579
+SubnetMask = "255.255.254.0"
+UpdatesLost = 208
+OSRedHatRelease = "Scientific Linux release 5.8 (Boron)"
+TotalJobRunTime = 5157
+StartdIpAddr = "<128.105.245.15:56069>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+OSG_minor = 1
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.120000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 5
+HibernationState = "NONE"
+JavaMFlops = 310.838867
+MonitorSelfAge = 289215
+LoadAvg = 1.060000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+OSglibc_micro = 0
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358386312
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37829256
+VirtualMemory = 2124538
+TotalVirtualMemory = 4249076
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c015.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:43"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.060000
+TotalCondorLoadAvg = 2.120000
+
+Machine = "glow-c015.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391773
+UpdateSequenceNumber = 1174
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_33"
+OSG_major = 3
+JobId = "1223718.77"
+PublicClaimId = "<128.105.245.15:56069>#1358102086#78#..."
+TotalTimeMatchedIdle = 3
+HasMPI = true
+TotalClaimRunTime = 7167
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c015"
+JavaVendor = "Sun Microsystems Inc."
+HasAFS_OSG = true
+TotalTimePreemptingVacating = 8
+Name = "slot2@glow-c015.cs.wisc.edu"
+ImageSize = 108056
+NumPids = 1
+MonitorSelfTime = 1358391540
+TimeToLive = 2147483647
+KeyboardIdle = 290884
+LastBenchmark = 1358379296
+TotalDisk = 75658512
+OSglibc_minor = 5
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358379296
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1175
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091913
+HasAFS_Atlas = true
+ClockDay = 3
+IsWakeOnLanEnabled = true
+HasCVMFS_CMS = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 5074
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasAFS = true
+AFSCacheUsed = 132
+HasIOProxy = true
+TotalTimeClaimedBusy = 284532
+MonitorSelfImageSize = 9724.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391549
+OSIssue = "Scientific Linux release 5.8 (Boron)"
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 54
+OSG_micro = 8
+TotalMemory = 2025
+DaemonStartTime = 1358102086
+EnteredCurrentActivity = 1358390882
+MyAddress = "<128.105.245.15:56069>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223718.77#1358389444"
+AFS_SYSNAMES = "'i386_linux26'"
+HasJava = true
+EnteredCurrentState = 1358384600
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1012
+IsWakeAble = true
+OSglibc_major = 2
+MyCurrentTime = 1358391773
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 7
+AFSCacheAvail = 100000
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+HasCVMFS_Atlas = true
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000000000000000000000"
+KFlops = 1042629
+UpdatesSequenced = 1174
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4708
+AFS_SYSNAME = "i386_linux26"
+Arch = "INTEL"
+Mips = 2435
+Activity = "Busy"
+OSKernelRelease = "2.6.18-308.13.1.el5"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+OSlibc6 = "libc-2.5.so"
+LastFetchWorkCompleted = 1358391549
+HasTDP = true
+ConsoleIdle = 290884
+SubnetMask = "255.255.254.0"
+UpdatesLost = 27
+OSRedHatRelease = "Scientific Linux release 5.8 (Boron)"
+TotalJobRunTime = 891
+StartdIpAddr = "<128.105.245.15:56069>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+OSG_minor = 1
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.060000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 5
+HibernationState = "NONE"
+JavaMFlops = 310.838867
+MonitorSelfAge = 289455
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+OSglibc_micro = 0
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390882
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37829256
+VirtualMemory = 2124538
+TotalVirtualMemory = 4249076
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c015.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:02:b3:d9:0e:43"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.060000
+
+Machine = "glow-c070.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391832
+UpdateSequenceNumber = 1217
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223679.3"
+PublicClaimId = "<128.105.245.70:39810>#1358102709#113#..."
+TotalTimeMatchedIdle = 10
+HasMPI = true
+TotalClaimRunTime = 7232
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c070"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 7
+Name = "slot1@glow-c070.cs.wisc.edu"
+ImageSize = 115536
+NumPids = 1
+MonitorSelfTime = 1358391678
+TimeToLive = 2147483647
+KeyboardIdle = 290322
+LastBenchmark = 1358373471
+TotalDisk = 72254488
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358373471
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1833
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.096063
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 6805
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 282154
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391831
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 102
+TotalMemory = 3862
+DaemonStartTime = 1358102709
+EnteredCurrentActivity = 1358384600
+MyAddress = "<128.105.245.70:39810>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223679.3#1358384461"
+HasJava = true
+EnteredCurrentState = 1358384596
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391832
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 38
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1263
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00555505040000002554000000000aaa"
+KFlops = 1057312
+UpdatesSequenced = 1834
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3804
+Arch = "INTEL"
+Mips = 2558
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391831
+HasTDP = true
+ConsoleIdle = 290322
+SubnetMask = "255.255.254.0"
+UpdatesLost = 236
+TotalJobRunTime = 7232
+StartdIpAddr = "<128.105.245.70:39810>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.070000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.665710
+MonitorSelfAge = 288969
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358384600
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127244
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c070.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b3:f4"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.070000
+
+Machine = "glow-c070.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391833
+UpdateSequenceNumber = 1243
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223709.73"
+PublicClaimId = "<128.105.245.70:39810>#1358102709#112#..."
+TotalTimeMatchedIdle = 9
+HasMPI = true
+TotalClaimRunTime = 7225
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c070"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 14
+Name = "slot2@glow-c070.cs.wisc.edu"
+ImageSize = 109656
+NumPids = 1
+MonitorSelfTime = 1358391678
+TimeToLive = 2147483647
+KeyboardIdle = 290322
+LastBenchmark = 1358373471
+TotalDisk = 72254488
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358373471
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1866
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.096063
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 8633
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 280335
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391831
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 117
+TotalMemory = 3862
+DaemonStartTime = 1358102709
+EnteredCurrentActivity = 1358389635
+MyAddress = "<128.105.245.70:39810>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223709.73#1358387884"
+HasJava = true
+EnteredCurrentState = 1358384603
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391833
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 9
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1263
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00800000000000000008000000000000"
+KFlops = 1057312
+UpdatesSequenced = 1864
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3804
+Arch = "INTEL"
+Mips = 2558
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391831
+HasTDP = true
+ConsoleIdle = 290322
+SubnetMask = "255.255.254.0"
+UpdatesLost = 36
+TotalJobRunTime = 2198
+StartdIpAddr = "<128.105.245.70:39810>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.070000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.665710
+MonitorSelfAge = 288969
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389635
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127244
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c070.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b3:f4"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.030000
+TotalCondorLoadAvg = 2.070000
+
+Machine = "glow-c071.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391970
+UpdateSequenceNumber = 1407
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223724.31"
+PublicClaimId = "<128.105.245.71:46103>#1358104853#131#..."
+TotalTimeMatchedIdle = 11
+HasMPI = true
+TotalClaimRunTime = 7370
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c071"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 6
+Name = "slot1@glow-c071.cs.wisc.edu"
+ImageSize = 107580
+NumPids = 1
+MonitorSelfTime = 1358391902
+TimeToLive = 2147483647
+KeyboardIdle = 288315
+LastBenchmark = 1358371066
+TotalDisk = 72255560
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371066
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1408
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095844
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90047
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 196716
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 244
+TotalMemory = 3862
+DaemonStartTime = 1358104853
+EnteredCurrentActivity = 1358391667
+MyAddress = "<128.105.245.71:46103>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223724.31#1358389446"
+HasJava = true
+EnteredCurrentState = 1358384599
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391970
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 84
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1266
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x054a8000000aaa024421000200041000"
+KFlops = 1097557
+UpdatesSequenced = 1407
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2459
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 288315
+SubnetMask = "255.255.254.0"
+UpdatesLost = 209
+TotalJobRunTime = 303
+StartdIpAddr = "<128.105.245.71:46103>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.200000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 554.881592
+MonitorSelfAge = 287050
+LoadAvg = 1.100000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358391667
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127780
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c071.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:0e"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.100000
+TotalCondorLoadAvg = 2.200000
+
+Machine = "glow-c071.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391971
+UpdateSequenceNumber = 1387
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223718.5"
+PublicClaimId = "<128.105.245.71:46103>#1358104853#132#..."
+TotalTimeMatchedIdle = 10
+HasMPI = true
+TotalClaimRunTime = 7360
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c071"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 13
+Name = "slot2@glow-c071.cs.wisc.edu"
+ImageSize = 108748
+NumPids = 1
+MonitorSelfTime = 1358391902
+TimeToLive = 2147483647
+KeyboardIdle = 288315
+LastBenchmark = 1358371066
+TotalDisk = 72255560
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371066
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1388
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.095844
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90517
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 196277
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 256
+TotalMemory = 3862
+DaemonStartTime = 1358104853
+EnteredCurrentActivity = 1358390729
+MyAddress = "<128.105.245.71:46103>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223718.5#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391971
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 36
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1266
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000000000000000000000"
+KFlops = 1097557
+UpdatesSequenced = 1387
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2459
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 288315
+SubnetMask = "255.255.254.0"
+UpdatesLost = 20
+TotalJobRunTime = 1242
+StartdIpAddr = "<128.105.245.71:46103>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.200000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 554.881592
+MonitorSelfAge = 287050
+LoadAvg = 1.100000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390729
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127780
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c071.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:0e"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.100000
+TotalCondorLoadAvg = 2.200000
+
+Machine = "glow-c072.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391927
+UpdateSequenceNumber = 1367
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1220853.129"
+PublicClaimId = "<128.105.245.72:60862>#1358100963#125#..."
+TotalTimeMatchedIdle = 12
+HasMPI = true
+TotalClaimRunTime = 13278
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c072"
+JavaVendor = "Sun Microsystems Inc."
+Name = "slot1@glow-c072.cs.wisc.edu"
+ImageSize = 15496
+NumPids = 1
+MonitorSelfTime = 1358391855
+TimeToLive = 2147483647
+KeyboardIdle = 292162
+LastBenchmark = 1358371142
+TotalDisk = 75669884
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371142
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1366
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083278
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90240
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 200411
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 214
+TotalMemory = 2026
+DaemonStartTime = 1358100963
+EnteredCurrentActivity = 1358378649
+MyAddress = "<128.105.245.72:60862>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1220853.129#1358328602"
+HasJava = true
+EnteredCurrentState = 1358378649
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391927
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 77
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x555000222aa900000000000000040100"
+KFlops = 1101446
+UpdatesSequenced = 1367
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3796
+Arch = "INTEL"
+Mips = 2645
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 292162
+SubnetMask = "255.255.254.0"
+UpdatesLost = 196
+TotalJobRunTime = 13278
+StartdIpAddr = "<128.105.245.72:60862>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.304688
+MonitorSelfAge = 290893
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358378649
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37834942
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c072.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b2:c8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c072.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391928
+UpdateSequenceNumber = 1383
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223718.53"
+PublicClaimId = "<128.105.245.72:60862>#1358100963#132#..."
+TotalTimeMatchedIdle = 15
+HasMPI = true
+TotalClaimRunTime = 7327
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c072"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 9
+Name = "slot2@glow-c072.cs.wisc.edu"
+ImageSize = 106828
+NumPids = 1
+MonitorSelfTime = 1358391855
+TimeToLive = 2147483647
+KeyboardIdle = 292162
+LastBenchmark = 1358371142
+TotalDisk = 75669884
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371142
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1383
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083278
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 86267
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 204424
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 186
+TotalMemory = 2026
+DaemonStartTime = 1358100963
+EnteredCurrentActivity = 1358390852
+MyAddress = "<128.105.245.72:60862>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223718.53#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384598
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391928
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 54
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000010000000000000000000000"
+KFlops = 1101446
+UpdatesSequenced = 1383
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3796
+Arch = "INTEL"
+Mips = 2645
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 292162
+SubnetMask = "255.255.254.0"
+UpdatesLost = 27
+TotalJobRunTime = 1076
+StartdIpAddr = "<128.105.245.72:60862>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 607.304688
+MonitorSelfAge = 290893
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390852
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37834942
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c072.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b2:c8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c073.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391727
+UpdateSequenceNumber = 1372
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223720.33"
+PublicClaimId = "<128.105.245.73:34400>#1358101136#176#..."
+TotalTimeMatchedIdle = 8
+HasMPI = true
+TotalClaimRunTime = 7120
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c073"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 15
+Name = "slot1@glow-c073.cs.wisc.edu"
+ImageSize = 106824
+NumPids = 1
+MonitorSelfTime = 1358391542
+TimeToLive = 2147483647
+KeyboardIdle = 291787
+LastBenchmark = 1358374310
+TotalDisk = 72254724
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374310
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1372
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091721
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 11603
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 278715
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391498
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 219
+TotalMemory = 3862
+DaemonStartTime = 1358101136
+EnteredCurrentActivity = 1358391012
+MyAddress = "<128.105.245.73:34400>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223720.33#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384601
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391727
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 25
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00011501100000000090000000000000"
+KFlops = 1097335
+UpdatesSequenced = 1372
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3800
+Arch = "INTEL"
+Mips = 2537
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391498
+HasTDP = true
+ConsoleIdle = 291787
+SubnetMask = "255.255.254.0"
+UpdatesLost = 211
+TotalJobRunTime = 715
+StartdIpAddr = "<128.105.245.73:34400>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 610.132385
+MonitorSelfAge = 290407
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358391012
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127362
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c073.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:a8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c073.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391728
+UpdateSequenceNumber = 1354
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223712.3"
+PublicClaimId = "<128.105.245.73:34400>#1358101136#175#..."
+TotalTimeMatchedIdle = 13
+HasMPI = true
+TotalClaimRunTime = 7114
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c073"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 15
+Name = "slot2@glow-c073.cs.wisc.edu"
+ImageSize = 111320
+NumPids = 1
+MonitorSelfTime = 1358391542
+TimeToLive = 2147483647
+KeyboardIdle = 291787
+LastBenchmark = 1358374310
+TotalDisk = 72254724
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358374310
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1355
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.091721
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 11820
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 278537
+MonitorSelfImageSize = 9768.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 1358391482
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 184
+TotalMemory = 3862
+DaemonStartTime = 1358101136
+EnteredCurrentActivity = 1358390272
+MyAddress = "<128.105.245.73:34400>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223712.3#1358387885"
+HasJava = true
+EnteredCurrentState = 1358384606
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391728
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 17
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00400000000000000000000000000000"
+KFlops = 1097335
+UpdatesSequenced = 1354
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3800
+Arch = "INTEL"
+Mips = 2537
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 1358391483
+HasTDP = true
+ConsoleIdle = 291787
+SubnetMask = "255.255.254.0"
+UpdatesLost = 22
+TotalJobRunTime = 1456
+StartdIpAddr = "<128.105.245.73:34400>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.080000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 6
+HibernationState = "NONE"
+JavaMFlops = 610.132385
+MonitorSelfAge = 290407
+LoadAvg = 1.040000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390272
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127362
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c073.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:a8"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.040000
+TotalCondorLoadAvg = 2.080000
+
+Machine = "glow-c075.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391927
+UpdateSequenceNumber = 1363
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223705.69"
+PublicClaimId = "<128.105.245.75:51396>#1358102719#145#..."
+TotalTimeMatchedIdle = 12
+HasMPI = true
+TotalClaimRunTime = 7326
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c075"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 6
+Name = "slot1@glow-c075.cs.wisc.edu"
+ImageSize = 109000
+NumPids = 1
+MonitorSelfTime = 1358391689
+TimeToLive = 2147483647
+KeyboardIdle = 290407
+LastBenchmark = 1358371078
+TotalDisk = 72255552
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371078
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1367
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.079210
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 91955
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 196998
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 179
+TotalMemory = 3862
+DaemonStartTime = 1358102719
+EnteredCurrentActivity = 1358389198
+MyAddress = "<128.105.245.75:51396>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223705.69#1358387883"
+HasJava = true
+EnteredCurrentState = 1358384599
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391927
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 48
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000a01112aa8000000480000000001"
+KFlops = 1062936
+UpdatesSequenced = 1369
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2561
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290407
+SubnetMask = "255.255.254.0"
+UpdatesLost = 200
+TotalJobRunTime = 2729
+StartdIpAddr = "<128.105.245.75:51396>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.170000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 612.031982
+MonitorSelfAge = 288972
+LoadAvg = 1.080000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389198
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127776
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c075.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:80"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.080000
+TotalCondorLoadAvg = 2.170000
+
+Machine = "glow-c075.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391928
+UpdateSequenceNumber = 1387
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223709.18"
+PublicClaimId = "<128.105.245.75:51396>#1358102719#146#..."
+TotalTimeMatchedIdle = 25
+HasMPI = true
+TotalClaimRunTime = 7326
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c075"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 11
+Name = "slot2@glow-c075.cs.wisc.edu"
+ImageSize = 109248
+NumPids = 1
+MonitorSelfTime = 1358391689
+TimeToLive = 2147483647
+KeyboardIdle = 290407
+LastBenchmark = 1358371078
+TotalDisk = 72255552
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371078
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1393
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.079210
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 92931
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 195995
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 184
+TotalMemory = 3862
+DaemonStartTime = 1358102719
+EnteredCurrentActivity = 1358389476
+MyAddress = "<128.105.245.75:51396>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223709.18#1358387884"
+HasJava = true
+EnteredCurrentState = 1358384599
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391928
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 53
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000000010000000000000"
+KFlops = 1062936
+UpdatesSequenced = 1393
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3784
+Arch = "INTEL"
+Mips = 2561
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290407
+SubnetMask = "255.255.254.0"
+UpdatesLost = 35
+TotalJobRunTime = 2452
+StartdIpAddr = "<128.105.245.75:51396>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.170000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 612.031982
+MonitorSelfAge = 288972
+LoadAvg = 1.090000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389476
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36127776
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c075.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:80"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.080000
+TotalCondorLoadAvg = 2.170000
+
+Machine = "glow-c076.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391931
+UpdateSequenceNumber = 1317
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223711.21"
+PublicClaimId = "<128.105.245.76:37505>#1358101365#147#..."
+TotalTimeMatchedIdle = 16
+HasMPI = true
+TotalClaimRunTime = 7327
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c076"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 5
+Name = "slot1@glow-c076.cs.wisc.edu"
+ImageSize = 108868
+NumPids = 1
+MonitorSelfTime = 1358391773
+TimeToLive = 2147483647
+KeyboardIdle = 291766
+LastBenchmark = 1358371055
+TotalDisk = 75666396
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371055
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1314
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.087485
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 82680
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 207632
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+TotalTimeOwnerIdle = 1
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 187
+TotalMemory = 2026
+DaemonStartTime = 1358101365
+EnteredCurrentActivity = 1358389818
+MyAddress = "<128.105.245.76:37505>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223711.21#1358387884"
+HasJava = true
+EnteredCurrentState = 1358384598
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391931
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 38
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x0055500800002a082000000000000000"
+KFlops = 1079097
+UpdatesSequenced = 1313
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3836
+Arch = "INTEL"
+Mips = 2632
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 291766
+SubnetMask = "255.255.254.0"
+UpdatesLost = 197
+TotalJobRunTime = 2113
+StartdIpAddr = "<128.105.245.76:37505>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.050000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 613.514221
+MonitorSelfAge = 290409
+LoadAvg = 1.030000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389818
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37833198
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c076.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:cb:ea"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.020000
+TotalCondorLoadAvg = 2.050000
+
+Machine = "glow-c076.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391932
+UpdateSequenceNumber = 1349
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223707.26"
+PublicClaimId = "<128.105.245.76:37505>#1358101365#148#..."
+TotalTimeMatchedIdle = 14
+HasMPI = true
+TotalClaimRunTime = 7323
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c076"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 11
+Name = "slot2@glow-c076.cs.wisc.edu"
+ImageSize = 108568
+NumPids = 1
+MonitorSelfTime = 1358391773
+TimeToLive = 2147483647
+KeyboardIdle = 291766
+LastBenchmark = 1358371055
+TotalDisk = 75666396
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358371055
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1350
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.087485
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 89486
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 200766
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 216
+TotalMemory = 2026
+DaemonStartTime = 1358101365
+EnteredCurrentActivity = 1358389166
+MyAddress = "<128.105.245.76:37505>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223707.26#1358387883"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391932
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 67
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1265
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000200000000100040400000000"
+KFlops = 1079097
+UpdatesSequenced = 1349
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3836
+Arch = "INTEL"
+Mips = 2632
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 291766
+SubnetMask = "255.255.254.0"
+UpdatesLost = 39
+TotalJobRunTime = 2766
+StartdIpAddr = "<128.105.245.76:37505>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.050000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 613.514221
+MonitorSelfAge = 290409
+LoadAvg = 1.020000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389166
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37833198
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c076.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:cb:ea"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.020000
+TotalCondorLoadAvg = 2.050000
+
+Machine = "glow-c079.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391780
+UpdateSequenceNumber = 1349
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223715.87"
+PublicClaimId = "<128.105.245.79:56567>#1358102103#131#..."
+TotalTimeMatchedIdle = 13
+HasMPI = true
+TotalClaimRunTime = 7170
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c079"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 12
+Name = "slot1@glow-c079.cs.wisc.edu"
+ImageSize = 109544
+NumPids = 1
+MonitorSelfTime = 1358391553
+TimeToLive = 2147483647
+KeyboardIdle = 290873
+LastBenchmark = 1358376897
+TotalDisk = 72253784
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358376897
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1350
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083358
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 90020
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 198250
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 1315
+TotalMemory = 3862
+DaemonStartTime = 1358102103
+EnteredCurrentActivity = 1358390408
+MyAddress = "<128.105.245.79:56567>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223715.87#1358389443"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391780
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 57
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x09550000000150000000004300000020"
+KFlops = 1077862
+UpdatesSequenced = 1351
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4672
+Arch = "INTEL"
+Mips = 2541
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290873
+SubnetMask = "255.255.254.0"
+UpdatesLost = 205
+TotalJobRunTime = 1372
+StartdIpAddr = "<128.105.245.79:56567>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.010000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 608.318970
+MonitorSelfAge = 289451
+LoadAvg = 1.000000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390408
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36126892
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c079.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:70"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.000000
+TotalCondorLoadAvg = 2.010000
+
+Machine = "glow-c079.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391781
+UpdateSequenceNumber = 1370
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223706.79"
+PublicClaimId = "<128.105.245.79:56567>#1358102103#132#..."
+TotalTimeMatchedIdle = 15
+HasMPI = true
+TotalClaimRunTime = 7170
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c079"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 15
+Name = "slot2@glow-c079.cs.wisc.edu"
+ImageSize = 110448
+NumPids = 1
+MonitorSelfTime = 1358391553
+TimeToLive = 2147483647
+KeyboardIdle = 290873
+LastBenchmark = 1358376897
+TotalDisk = 72253784
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358376897
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1373
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.083358
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 89020
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 200298
+MonitorSelfImageSize = 9760.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 244
+TotalMemory = 3862
+DaemonStartTime = 1358102103
+EnteredCurrentActivity = 1358389094
+MyAddress = "<128.105.245.79:56567>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223706.79#1358387883"
+HasJava = true
+EnteredCurrentState = 1358384604
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1931
+IsWakeAble = true
+MyCurrentTime = 1358391781
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 76
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1262
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x00000000000000010000200000000000"
+KFlops = 1077862
+UpdatesSequenced = 1372
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 4672
+Arch = "INTEL"
+Mips = 2541
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290873
+SubnetMask = "255.255.254.0"
+UpdatesLost = 27
+TotalJobRunTime = 2687
+StartdIpAddr = "<128.105.245.79:56567>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.010000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 608.318970
+MonitorSelfAge = 289451
+LoadAvg = 1.010000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358389094
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 36126892
+VirtualMemory = 4048360
+TotalVirtualMemory = 8096720
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c079.cs.wisc.edu"
+SlotID = 2
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:a2:70"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.000000
+TotalCondorLoadAvg = 2.010000
+
+Machine = "glow-c080.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391424
+UpdateSequenceNumber = 1393
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" ) || User == "osg_cmsprod@hep.wisc.edu" || TARGET.Group =?= "IceCube" || TARGET.Group =?= "LMCG" || TARGET.Group =?= "ATLAS" || TARGET.Group =?= "ChemE" || TARGET.Group =?= "MedPhys"
+JavaVersion = "1.6.0_37"
+JobId = "1223719.58"
+PublicClaimId = "<128.105.245.80:56596>#1358102116#144#..."
+TotalTimeMatchedIdle = 11
+HasMPI = true
+TotalClaimRunTime = 6817
+CpuIsBusy = false
+HasVM = false
+FileSystemDomain = "glow-c080"
+JavaVendor = "Sun Microsystems Inc."
+TotalTimePreemptingVacating = 8
+Name = "slot1@glow-c080.cs.wisc.edu"
+ImageSize = 108004
+NumPids = 1
+MonitorSelfTime = 1358391322
+TimeToLive = 2147483647
+KeyboardIdle = 290505
+LastBenchmark = 1358376372
+TotalDisk = 75666200
+MaxJobRetirementTime = ( ( TARGET.Group =?= MY.Site ) && JobUniverse != 1 ) * 3600 * 24 * 3
+LastUpdate = 1358376372
+Unhibernate = MY.MachineLastMatchTime =!= undefined
+CondorPlatform = "$CondorPlatform: x86_rhap_5 $"
+HasJICLocalStdin = true
+UpdatesTotal = 1394
+Cpus = 1
+IsValidCheckpointPlatform = ( ( ( TARGET.JobUniverse == 1 ) == false ) || ( ( MY.CheckpointPlatform =!= undefined ) && ( ( TARGET.LastCheckpointPlatform =?= MY.CheckpointPlatform ) || ( TARGET.NumCkpts == 0 ) ) ) )
+MonitorSelfCPUUsage = 0.087520
+ClockDay = 3
+IsWakeOnLanEnabled = true
+StarterAbilityList = "HasMPI,HasVM,HasJICLocalStdin,HasJICLocalConfig,HasJava,HasJobDeferral,HasTDP,HasFileTransfer,HasPerFileEncryption,HasReconnect,HasRemoteSyscalls,HasCheckpointing"
+JavaSpecificationVersion = "1.6"
+TotalTimeUnclaimedIdle = 80472
+CondorVersion = "$CondorVersion: 7.6.6 Jan 17 2012 BuildID: 401976 $"
+JobUniverse = 5
+HasIOProxy = true
+TotalTimeClaimedBusy = 208561
+MonitorSelfImageSize = 9764.000000
+Group = "LMCG"
+HibernationSupportedStates = "S4"
+ExecutableSize = 0
+LastFetchWorkSpawned = 0
+Requirements = ( START ) && ( IsValidCheckpointPlatform )
+SuspendedByAdmin = false
+TotalTimeClaimedIdle = 209
+TotalMemory = 2026
+DaemonStartTime = 1358102116
+EnteredCurrentActivity = 1358390907
+MyAddress = "<128.105.245.80:56596>"
+HasJICLocalConfig = true
+GlobalJobId = "condor.lmcg.wisc.edu#1223719.58#1358389444"
+HasJava = true
+EnteredCurrentState = 1358384601
+CpuBusyTime = 0
+CpuBusy = ( ( LoadAvg - CondorLoadAvg ) >= 0.500000 )
+COLLECTOR_HOST_STRING = "cm.chtc.wisc.edu, glowserv01.hep.wisc.edu"
+Memory = 1013
+IsWakeAble = true
+MyCurrentTime = 1358391424
+MonitorSelfRegisteredSocketCount = 3
+TotalTimeUnclaimedBenchmarking = 41
+TotalCpus = 2
+CkptServer = "glow-s001.cs.wisc.edu"
+ClockMin = 1257
+CurrentRank = 0.0
+Site = "UWCS"
+NextFetchWorkDelay = 300
+AuthenticatedIdentity = "unauthenticated@unmapped"
+OpSys = "LINUX"
+State = "Claimed"
+UpdatesHistory = "0x4aa800000000a0000000900120000000"
+KFlops = 1067617
+UpdatesSequenced = 1393
+Start = ( SuspendedByAdmin =!= true ) && ( TARGET.JobUniverse != 1 )
+RemoteUser = "szhou@lmcg.wisc.edu"
+HasRemoteSyscalls = true
+HasJobDeferral = true
+HasCheckpointing = true
+MonitorSelfResidentSetSize = 3792
+Arch = "INTEL"
+Mips = 2356
+Activity = "Busy"
+ClientMachine = "condor.lmcg.wisc.edu"
+IsWakeOnLanSupported = true
+LastFetchWorkCompleted = 0
+HasTDP = true
+ConsoleIdle = 290505
+SubnetMask = "255.255.254.0"
+UpdatesLost = 209
+TotalJobRunTime = 517
+StartdIpAddr = "<128.105.245.80:56596>"
+WakeOnLanEnabledFlags = "Magic Packet"
+NiceUser = false
+TargetType = "Job"
+HibernationLevel = 0
+HasFileTransfer = true
+TotalLoadAvg = 2.150000
+Rank = ( TARGET.Group =?= "UWCS" ) - 0.500000 * ( MY.IsGLOWMember =!= true ) - ( Owner =?= "backfill" )
+MonitorSelfSecuritySessions = 9
+HibernationState = "NONE"
+JavaMFlops = 551.267944
+MonitorSelfAge = 289207
+LoadAvg = 1.070000
+WakeOnLanSupportedFlags = "UniCast Packet,MultiCast Packet,BroadCast Packet,Magic Packet"
+HasPerFileEncryption = true
+CheckpointPlatform = "LINUX INTEL 2.6.x normal 0x40000000"
+JobStart = 1358390907
+CurrentTime = time()
+RemoteOwner = "szhou@lmcg.wisc.edu"
+Disk = 37833100
+VirtualMemory = 2124576
+TotalVirtualMemory = 4249152
+TotalSlots = 2
+GLOWManaged = true
+UidDomain = "glow-c080.cs.wisc.edu"
+SlotID = 1
+AtlasLocation = "/home/atlas"
+IsDedicated = true
+HasSwapCkpt = false
+SlotWeight = CPUs
+HasReconnect = true
+HardwareAddress = "00:30:48:53:b3:b4"
+IsGeneralPurposeSlot = true
+MyType = "Machine"
+CanHibernate = true
+CondorLoadAvg = 1.070000
+TotalCondorLoadAvg = 2.150000
+
+Machine = "glow-c080.cs.wisc.edu"
+IsGeneralPurposeVM = true
+FlockingFirewall = false
+CODUsers = "gthain gihan"
+LastHeardFrom = 1358391719
+UpdateSequenceNumber = 1370
+IsGLOWMember = TARGET.Group =?= "UWCS" || ( TARGET.Group =?= "HEP" && HEP_VO =?= "uscms" )

<TRUNCATED>


[34/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
Enabled Feed Tests and Added External Library tests

Feed tests had been switched off for a while due to too many
sporadic failures. Now, we are switching them back on.
In addition, a new set of tests has been added to verify that the external
library works as expected.

Change-Id: Idd1fccd136fa2645b2707bbf7c04e60991ae8d4a
Reviewed-on: https://asterix-gerrit.ics.uci.edu/625
Tested-by: Jenkins <je...@fulliautomatix.ics.uci.edu>
Reviewed-by: abdullah alamoudi <ba...@gmail.com>


Project: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/repo
Commit: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/commit/ac683db0
Tree: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/tree/ac683db0
Diff: http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/diff/ac683db0

Branch: refs/heads/master
Commit: ac683db0848ac45991ee7e953c10251ae89b74f4
Parents: 8259452
Author: Abdullah Alamoudi <ba...@gmail.com>
Authored: Tue Feb 23 00:59:31 2016 +0300
Committer: abdullah alamoudi <ba...@gmail.com>
Committed: Mon Feb 22 14:28:04 2016 -0800

----------------------------------------------------------------------
 .gitattributes                                  |      3 +
 .../data/external-parser/August16-20-long.txt   |   1106 +
 .../data/external-parser/dropbox/jobads1.txt    |  12869 +
 .../data/external-parser/dropbox/jobads2.txt    |  12869 +
 asterix-app/data/external-parser/jobads.new     |  12869 +
 asterix-app/data/external-parser/jobads.old     |   1106 +
 asterix-app/data/external-parser/jobads.txt     |  12869 +
 asterix-app/data/external-parser/testdata.txt   | 684032 ++++++++++++++++
 asterix-app/pom.xml                             |     64 +-
 .../src/main/assembly/binary-assembly.xml       |     10 +
 .../api/http/servlet/ConnectorAPIServlet.java   |      2 +-
 .../asterix/api/http/servlet/FeedServlet.java   |      2 +-
 .../api/http/servlet/FeedServletUtil.java       |      2 +-
 .../app/external/CentralFeedManager.java        |    110 +
 .../external/ExternalIndexingOperations.java    |    762 +
 .../app/external/ExternalLibraryUtils.java      |    402 +
 .../external/FeedJobNotificationHandler.java    |    742 +
 .../apache/asterix/app/external/FeedJoint.java  |    190 +
 .../app/external/FeedLifecycleListener.java     |    499 +
 .../asterix/app/external/FeedLoadManager.java   |    301 +
 .../app/external/FeedMessageReceiver.java       |     96 +
 .../asterix/app/external/FeedOperations.java    |    254 +
 .../app/external/FeedTrackingManager.java       |    187 +
 .../app/external/FeedWorkCollection.java        |    206 +
 .../FeedWorkRequestResponseHandler.java         |    269 +
 .../asterix/app/external/FeedsActivator.java    |    118 +
 .../asterix/aql/translator/QueryTranslator.java |     10 +-
 .../apache/asterix/feed/CentralFeedManager.java |    110 -
 .../feed/FeedJobNotificationHandler.java        |    742 -
 .../java/org/apache/asterix/feed/FeedJoint.java |    190 -
 .../asterix/feed/FeedLifecycleListener.java     |    499 -
 .../apache/asterix/feed/FeedLoadManager.java    |    302 -
 .../asterix/feed/FeedMessageReceiver.java       |     96 -
 .../asterix/feed/FeedTrackingManager.java       |    188 -
 .../apache/asterix/feed/FeedWorkCollection.java |    206 -
 .../feed/FeedWorkRequestResponseHandler.java    |    269 -
 .../org/apache/asterix/feed/FeedsActivator.java |    118 -
 .../file/ExternalIndexingOperations.java        |    760 -
 .../org/apache/asterix/file/FeedOperations.java |    254 -
 .../file/SecondaryIndexOperationsHelper.java    |      1 +
 .../file/SecondaryRTreeOperationsHelper.java    |      1 +
 .../bootstrap/CCApplicationEntryPoint.java      |      7 +-
 .../bootstrap/ExternalLibraryBootstrap.java     |    325 -
 .../hyracks/bootstrap/FeedBootstrap.java        |      2 +-
 .../bootstrap/GlobalRecoveryManager.java        |      4 +-
 .../bootstrap/NCApplicationEntryPoint.java      |      3 +-
 .../http/servlet/ConnectorAPIServletTest.java   |      2 +-
 .../asterix/app/external/TestLibrarian.java     |     76 +
 .../asterix/test/runtime/ExecutionTest.java     |      6 +
 .../asterix/test/runtime/RepeatedTest.java      |     12 +-
 .../classad-parser/classad-parser.1.ddl.aql     |     31 +
 .../classad-parser/classad-parser.2.lib.aql     |     19 +
 .../classad-parser/classad-parser.3.ddl.aql     |     26 +
 .../classad-parser/classad-parser.4.query.aql   |     23 +
 .../classad-parser/classad-parser.5.lib.aql     |     19 +
 .../classad-parser2/classad-parser2.1.ddl.aql   |     31 +
 .../classad-parser2/classad-parser2.2.lib.aql   |     19 +
 .../classad-parser2/classad-parser2.3.ddl.aql   |     26 +
 .../classad-parser2/classad-parser2.4.query.aql |     23 +
 .../classad-parser2/classad-parser2.5.lib.aql   |     19 +
 .../getCapital/getCapital.1.ddl.aql             |     26 +
 .../getCapital/getCapital.2.lib.aql             |     19 +
 .../getCapital/getCapital.3.query.aql           |     23 +
 .../getCapital/getCapital.4.lib.aql             |     19 +
 .../typed_adapter/typed_adapter.1.ddl.aql       |     38 +
 .../typed_adapter/typed_adapter.2.lib.aql       |     19 +
 .../typed_adapter/typed_adapter.3.ddl.aql       |     31 +
 .../typed_adapter/typed_adapter.4.update.aql    |     32 +
 .../typed_adapter/typed_adapter.5.query.aql     |     32 +
 .../typed_adapter/typed_adapter.6.lib.aql       |     19 +
 .../feed-with-external-parser.1.ddl.aql         |     32 +
 .../feed-with-external-parser.2.lib.aql         |     19 +
 .../feed-with-external-parser.3.ddl.aql         |     33 +
 .../feed-with-external-parser.4.update.aql      |     30 +
 .../feed-with-external-parser.5.query.aql       |     29 +
 .../feed-with-external-parser.6.lib.aql         |     19 +
 .../feed-with-external-parser.7.ddl.aql         |     25 +
 .../queries/feeds/feeds_06/feeds_06.3.sleep.aql |      2 +-
 .../queries/feeds/feeds_07/feeds_07.1.ddl.aql   |     31 +-
 .../queries/feeds/feeds_08/feeds_08.1.ddl.aql   |     28 +-
 .../queries/feeds/feeds_09/feeds_09.1.ddl.aql   |     28 +-
 .../queries/feeds/feeds_09/feeds_09.4.ddl.aql   |     30 +
 .../queries/feeds/feeds_10/feeds_10.4.ddl.aql   |     30 +
 .../queries/feeds/feeds_12/feeds_12.4.ddl.aql   |     30 +
 .../issue_230_feeds/issue_230_feeds.4.ddl.aql   |     30 +
 .../classad-parser/classad-parser.1.adm         |    100 +
 .../classad-parser2/classad-parser2.1.adm       |      5 +
 .../getCapital/getCapital.1.adm                 |      6 +
 .../typed_adapter/typed_adapter.1.adm           |      5 +
 .../feed-with-external-parser.1.adm             |     99 +
 .../results/feeds/feeds_03/feeds_03.1.adm       |      2 +-
 .../src/test/resources/runtimets/testsuite.xml  |     61 +-
 .../resources/runtimets/testsuite_sqlpp.xml     |      4 +-
 .../apache/asterix/test/aql/ITestLibrarian.java |     31 +
 .../apache/asterix/test/aql/TestExecutor.java   |     48 +-
 .../adapter/factory/GenericAdapterFactory.java  |     46 +-
 .../external/api/IInputStreamProvider.java      |      7 +
 .../external/api/IRecordFlowController.java     |     27 -
 .../asterix/external/api/IRecordReader.java     |     16 +-
 .../AbstractFeedDataFlowController.java         |     12 +-
 .../dataflow/FeedRecordDataFlowController.java  |     79 +-
 .../dataflow/FeedStreamDataFlowController.java  |      5 +
 .../external/dataflow/FeedTupleForwarder.java   |     54 +-
 .../dataflow/IndexingDataFlowController.java    |     19 +-
 .../dataflow/RecordDataFlowController.java      |     25 +-
 .../feed/dataflow/FeedRuntimeInputHandler.java  |     12 +-
 .../reader/couchbase/CouchbaseReader.java       |      8 +-
 .../record/reader/hdfs/HDFSRecordReader.java    |     10 +
 .../record/reader/rss/RSSRecordReader.java      |     10 +
 .../stream/AbstractStreamRecordReader.java      |      8 +
 .../stream/EmptyLineSeparatedRecordReader.java  |    119 +
 .../EmptyLineSeparatedRecordReaderFactory.java  |     45 +
 .../record/reader/stream/LineRecordReader.java  |     13 +-
 .../stream/SemiStructuredRecordReader.java      |      6 +-
 .../reader/twitter/TwitterPullRecordReader.java |     10 +
 .../reader/twitter/TwitterPushRecordReader.java |     10 +
 .../external/input/stream/AInputStream.java     |     13 +-
 .../input/stream/AInputStreamReader.java        |      5 +
 .../external/input/stream/BasicInputStream.java |     16 +
 .../stream/LocalFileSystemInputStream.java      |     20 +-
 .../input/stream/SocketInputStream.java         |     16 +
 .../LocalFSInputStreamProviderFactory.java      |      3 +-
 .../TwitterFirehoseStreamProviderFactory.java   |      3 +-
 .../provider/HDFSInputStreamProvider.java       |     18 +
 .../provider/LocalFSInputStreamProvider.java    |     34 +-
 .../provider/SocketInputStreamProvider.java     |     10 +
 .../TwitterFirehoseInputStreamProvider.java     |     32 +-
 .../library/ExternalFunctionProvider.java       |      3 +-
 .../library/ExternalLibraryManager.java         |      8 +-
 .../FeedCollectOperatorDescriptor.java          |      6 +-
 .../operators/FeedMetaComputeNodePushable.java  |      9 +-
 .../operators/FeedMetaStoreNodePushable.java    |     18 +-
 .../asterix/external/parser/ADMDataParser.java  |     18 +-
 .../provider/AdapterFactoryProvider.java        |      1 +
 .../provider/DataflowControllerProvider.java    |     55 +-
 .../provider/DatasourceFactoryProvider.java     |     10 +
 .../asterix/external/util/DataflowUtils.java    |      7 +-
 .../util/ExternalDataCompatibilityUtils.java    |     21 +-
 .../external/util/ExternalDataConstants.java    |      2 +
 .../external/util/ExternalDataUtils.java        |      3 +-
 .../asterix/external/util/FeedLogManager.java   |     41 +-
 .../apache/asterix/external/util/FeedUtils.java |     40 +-
 .../external/util/FileSystemWatcher.java        |     20 +-
 .../classad/AMutableCharArrayString.java        |    357 +
 .../external/classad/AMutableNumberFactor.java  |     37 +
 .../external/classad/AttributeReference.java    |    474 +
 .../classad/BuiltinClassAdFunctions.java        |   1927 +
 .../external/classad/CaseInsensitiveString.java |     61 +
 .../external/classad/CharArrayLexerSource.java  |     86 +
 .../asterix/external/classad/ClassAd.java       |   1565 +
 .../asterix/external/classad/ClassAdFunc.java   |     25 +
 .../asterix/external/classad/ClassAdTime.java   |    278 +
 .../external/classad/ClassAdUnParser.java       |    492 +
 .../apache/asterix/external/classad/Common.java |     66 +
 .../asterix/external/classad/EvalState.java     |    120 +
 .../asterix/external/classad/ExprList.java      |    280 +
 .../asterix/external/classad/ExprTree.java      |    401 +
 .../external/classad/ExprTreeHolder.java        |    144 +
 .../external/classad/FileLexerSource.java       |     89 +
 .../asterix/external/classad/FunctionCall.java  |    354 +
 .../classad/InputStreamLexerSource.java         |    111 +
 .../apache/asterix/external/classad/Lexer.java  |    962 +
 .../asterix/external/classad/LexerSource.java   |     62 +
 .../asterix/external/classad/Literal.java       |    521 +
 .../asterix/external/classad/Operation.java     |   1902 +
 .../asterix/external/classad/PrettyPrint.java   |    257 +
 .../external/classad/StringLexerSource.java     |     82 +
 .../asterix/external/classad/TokenValue.java    |    157 +
 .../apache/asterix/external/classad/Util.java   |    262 +
 .../apache/asterix/external/classad/Value.java  |    871 +
 .../object/pool/AttributeReferencePool.java     |     35 +
 .../classad/object/pool/BitSetPool.java         |     34 +
 .../object/pool/CaseInsensitiveStringPool.java  |     55 +
 .../object/pool/CharArrayStringPool.java        |     34 +
 .../classad/object/pool/ClassAdPool.java        |     36 +
 .../classad/object/pool/ExprHolderPool.java     |     33 +
 .../classad/object/pool/ExprListPool.java       |     35 +
 .../classad/object/pool/LiteralPool.java        |     33 +
 .../classad/object/pool/OperationPool.java      |     35 +
 .../external/classad/object/pool/Pool.java      |     45 +
 .../classad/object/pool/TokenValuePool.java     |     33 +
 .../external/classad/object/pool/ValuePool.java |     33 +
 .../classad/test/ClassAdFunctionalTest.java     |     57 +
 .../classad/test/ClassAdParserTest.java         |     82 +
 .../external/classad/test/ClassAdToADMTest.java |    132 +
 .../external/classad/test/ClassAdUnitTest.java  |     61 +
 .../classad/test/ClassAdUnitTester.java         |    796 +
 .../external/classad/test/FunctionalTester.java |   1186 +
 .../asterix/external/library/ClassAdParser.java |   1783 +
 .../external/library/ClassAdParserFactory.java  |    100 +
 .../adapter/TestTypedAdapterFactory.java        |     13 +-
 .../external/parser/test/ADMDataParserTest.java |    116 +
 .../operator/file/ADMDataParserTest.java        |    116 -
 .../src/test/resources/August16-20-long.txt     |   1106 +
 .../src/test/resources/functional_tests.txt     |    362 +
 .../src/test/resources/jobads.new               |  12869 +
 .../src/test/resources/jobads.old               |   1106 +
 .../src/test/resources/jobads.txt               |  12869 +
 .../src/test/resources/testdata.txt             | 684032 ++++++++++++++++
 .../src/test/resources/tests.txt                |    365 +
 .../record-parser/record-parser.1.ddl.aql       |     34 +
 .../record-parser/record-parser.2.query.aql     |     27 +
 .../integrationts/library/testsuite.xml         |      7 +
 .../metadata/declared/AqlMetadataProvider.java  |      4 +-
 .../metadata/feeds/FeedMetadataUtil.java        |     16 +-
 205 files changed, 1473449 insertions(+), 4520 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/.gitattributes
----------------------------------------------------------------------
diff --git a/.gitattributes b/.gitattributes
index 803e30c..2451378 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -15,3 +15,6 @@
 
 # Declare files that will always have LF line endings on checkout.
 *.lf test eol=lf
+
+# Explicitly declare binary files
+*testdata.txt binary

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/data/external-parser/August16-20-long.txt
----------------------------------------------------------------------
diff --git a/asterix-app/data/external-parser/August16-20-long.txt b/asterix-app/data/external-parser/August16-20-long.txt
new file mode 100644
index 0000000..7a1abd7
--- /dev/null
+++ b/asterix-app/data/external-parser/August16-20-long.txt
@@ -0,0 +1,1106 @@
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5616@cms"
+JobFinishedHookDone = 1439847319
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 25
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = { "combine_output.tar" }
+ProcId = 0
+CRAB_UserGroup = "dcms"
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439847319
+CRAB_SiteWhitelist = {  }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.out;_condor_stderr=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/73/uscms5616/150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/C=DE/O=GermanGrid/OU=KIT/CN=Andrew Gilbert"
+ClusterId = 1217455
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T2_CH_CERN"
+CompletionDate = 1439847319
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5616"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "agilbert"
+CommittedTime = 0
+X509UserProxy = "63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+QDate = 1439764883
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439764892
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1217455.0#1439764883"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc491"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 82427.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = { "T2_FR_CCIN2P3","T1_IT_CNAF","T1_ES_PIC","T1_UK_RAL","T2_FI_HIP","T2_US_Nebraska" }
+DAG_NodesQueued = 0
+CRAB_JobCount = 25
+JobStartDate = 1439764892
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"splitOnRun\": false, \"events_per_job\": 1}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439764886
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = {  }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/7455/0/cluster1217455.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "prefit_cms_asimov_A1_5D-59ffde2b5d41be5f0c401d0a6a8a0194"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CRAB_JobSW = "CMSSW_7_4_0_pre9"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 82427.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 25
+CRAB_InputData = "/MinBias"
+SUBMIT_x509userproxy = "/data/certs/creds/63f0c4d862d8b4e4ddcfd29ed85b6b5899660759"
+StreamOut = false
+CRAB_ReqName = "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 0
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439764891
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 1315
+CRAB_ASOTimeout = 86400
+MaxHosts = 1
+RequestMemory_RAW = 2000
+CRAB_TFileOutputFiles = {  }
+User = "uscms5050@cms"
+JobFinishedHookDone = 1439773907
+DAG_NodesReady = 0
+OnExitHold = ( ExitCode =!= undefined && ExitCode != 0 )
+CoreSize = -1
+CRAB_DashboardTaskType = "analysis"
+DAG_NodesDone = 30
+CRAB_Attempt = 0
+LastHoldReason = "Spooling input data files"
+WantRemoteSyscalls = false
+MyType = "Job"
+CumulativeSuspensionTime = 0
+MinHosts = 1
+ReleaseReason = "Data files spooled"
+PeriodicHold = false
+PeriodicRemove = ( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )
+Err = "_condor_stderr"
+CRAB_AdditionalOutputFiles = {  }
+ProcId = 0
+CRAB_UserGroup = undefined
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+EnteredCurrentStatus = 1439773907
+CRAB_SiteWhitelist = { "T3_US_FNALLPC","T2_US_Purdue","T2_US_Nebraska" }
+NumJobStarts = 1
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage"
+JobUniverse = 7
+AutoClusterId = 10378
+In = "/dev/null"
+SUBMIT_TransferOutputRemaps = "_condor_stdout=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.out;_condor_stderr=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.err"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/67/uscms5050/150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+Requirements = true || false && TARGET.OPSYS == "LINUX" && TARGET.ARCH == "X86_64" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory
+CRAB_SplitAlgo = "EventBased"
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ferencek/CN=650164/CN=Dinko Ferencek"
+ClusterId = 1206367
+WhenToTransferOutput = "ON_EXIT"
+CRAB_AsyncDest = "T3_US_FNALLPC"
+CompletionDate = 1439773907
+OtherJobRemoveRequirements = DAGManJobId =?= ClusterId
+CRAB_FailedNodeLimit = -1
+BufferSize = 524288
+CRAB_RestURInoAPI = "/crabserver/prod"
+Environment = strcat("PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=",ClusterId,".",ProcId," CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local")
+TargetType = "Machine"
+LeaveJobInQueue = JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )
+x509userproxyexpiration = 1440294044
+CRAB_UserRole = undefined
+JobNotification = 0
+Owner = "uscms5050"
+CondorPlatform = "$CondorPlatform: X86_64-ScientificLinux_6.6 $"
+CRAB_UserHN = "ferencek"
+CommittedTime = 0
+X509UserProxy = "3a7798796bc24a800001338917ec45991bcf0a96"
+QDate = 1439615565
+ExitStatus = 0
+DAG_NodesFailed = 0
+RootDir = "/"
+JobCurrentStartDate = 1439615574
+CurrentHosts = 0
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1206367.0#1439615565"
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+RemoteSysCpu = 0.0
+TotalSuspensions = 0
+WantCheckpoint = false
+CRAB_RestHost = "cmsweb.cern.ch"
+CRAB_RetryOnASOFailures = 1
+Args = "RunJobs.dag"
+TransferInput = "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz"
+CRAB_JobArch = "slc6_amd64_gcc481"
+PeriodicRelease = false
+CRAB_TaskWorker = "vocms052"
+NumCkpts_RAW = 0
+CondorVersion = "$CondorVersion: 8.3.1 Jun 19 2015 $"
+RemoteCondorSetup = ""
+Out = "_condor_stdout"
+ShouldTransferFiles = "YES"
+DAG_NodesPrerun = 0
+DiskUsage = 1
+JobRunCount = 1
+CumulativeSlotTime = 158333.0
+CommittedSlotTime = 0
+LocalUserCpu = 0.0
+CRAB_SiteBlacklist = {  }
+DAG_NodesQueued = 0
+CRAB_JobCount = 30
+JobStartDate = 1439615574
+DAG_Status = 0
+CRAB_AlgoArgs = "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"lheInputFiles\": true, \"splitOnRun\": false, \"events_per_job\": 50000}, \"halt_job_on_file_boundaries\": false}"
+CRAB_SaveLogsFlag = 0
+CRAB_JobType = "analysis"
+CRAB_TransferOutputs = 1
+ExitBySignal = false
+StreamErr = false
+RemoveKillSig = "SIGUSR1"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+NumRestarts = 0
+NumSystemHolds = 0
+RequestDisk = DiskUsage
+OrigMaxHosts = 1
+JobPrio = 10
+NumCkpts = 0
+BufferBlockSize = 32768
+StageInStart = 1439615569
+ImageSize = 100
+MaxWallTimeMins = 1400
+DiskUsage_RAW = 1
+DAG_NodesUnready = 0
+CommittedSuspensionTime = 0
+CRAB_NumAutomJobRetries = 2
+CRAB_UserVO = "cms"
+CRAB_EDMOutputFiles = { "Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE.root" }
+Cmd = "dag_bootstrap_startup.sh"
+LocalSysCpu = 0.0
+Iwd = "/data/condor_local/spool/6367/0/cluster1206367.proc0.subproc0"
+LastHoldReasonCode = 16
+CRAB_PublishName = "LHE-17521057f93ed9cadf21dd45b3505145"
+CRAB_LumiMask = "{}"
+DAG_InRecovery = 0
+CRAB_MaxPost = 20
+TaskType = "ROOT"
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+LastSuspensionTime = 0
+CRAB_PublishGroupName = 0
+TransferOutputRemaps = undefined
+TransferOutput = "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001"
+CRAB_Workflow = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CRAB_JobSW = "CMSSW_7_1_18"
+DAG_NodesPostrun = 0
+ExitCode = 0
+JobStatus = 4
+RemoteWallClockTime = 158333.0
+ImageSize_RAW = 100
+OnExitRemove = ( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )
+DAG_NodesTotal = 30
+CRAB_InputData = "/Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8"
+SUBMIT_x509userproxy = "/data/certs/creds/3a7798796bc24a800001338917ec45991bcf0a96"
+StreamOut = false
+CRAB_ReqName = "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE"
+CurrentTime = time()
+HoldKillSig = "SIGUSR1"
+RequestMemory = 2000
+NiceUser = false
+RemoteUserCpu = 0.0
+CRAB_Publish = 1
+RequestCpus = 1
+SUBMIT_Iwd = "/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB"
+WantRemoteIO = true
+CRAB_BlacklistT1 = 0
+StageInFinish = 1439615572
+LastJobStatus = 2
+
+MaxWallTimeMins_RAW = 2800
+StatsLifetimeStarter = 165949
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "grid_cms"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SubmitEventNotes = "DAG Node: Job53"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+DAGParentNodeNames = ""
+MATCH_GLIDEIN_Site = "CERN"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 163084.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "59069"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+RecentBlockWrites = 0
+CurrentHosts = 0
+MATCH_GLIDEIN_ProcId = 1
+x509UserProxyExpiration = 1440397268
+Iwd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 75000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/128.142.45.103"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "689255460"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job53"
+LastPublicClaimId = "<128.142.45.103:55332>#1439963327#3#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_CH_CERN"
+RemoteSysCpu = 1963.0
+CRAB_Retry = 2
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1238992
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2800"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms5111"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_CH_CERN_ce302"
+LastJobLeaseRenewal = 1440131524
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_CH_CERN"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.main"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.kbutanov"
+MATCH_GLIDEIN_SiteWMS_Slot = "Unknown"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms5111@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2800
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440131525
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 1
+MATCH_GLIDEIN_Factory = "gfactory_service"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1233705.0#1439964847"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 59069
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1439965573
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/8123da6528ec4abd24562a99b4f2b0ec556bed0b"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440530096"
+MATCH_EXP_JOB_GLIDEIN_Factory = "gfactory_service"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.main"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 2128005.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=53 --cmsswVersion=CMSSW_7_4_7 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v2/50000/6E2F932B-633B-E511-A7AE-F04DA23BCE4C.root\"] --runAndLumis=job_lumis_53.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.53"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 165965.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_CH_CERN"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 53
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2800"
+MATCH_GLIDEIN_ToRetire = 1440530096
+ImageSize = 4250000
+JobCurrentStartDate = 1439965560
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1439965560
+LastMatchTime = 1439965560
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440564896"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+NumJobReconnects = 2
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins3@cmsgwms-factory.fnal.gov"
+SpooledOutputFiles = "jobReport.json.53"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.53"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 165965.0
+JobStatus = 4
+x509UserProxyEmail = "khakimjan.butanov@cern.ch"
+DAGManJobId = 1035690
+RemoteWallClockTime = 165965.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov"
+LastRemoteHost = "glidein_9757_931570227@b635ef6906.cern.ch"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T1_DE_KIT,T2_UK_London_IC,T2_CH_CERN"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 61434
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "1"
+CRAB_localOutputFiles = "stepB_MC.root=stepB_MC_53.root"
+MaxHosts = 1
+CRAB_UserHN = "kbutanov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm-eoscms.cern.ch"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms5111"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 4095188
+MATCH_EXP_Used_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "grid_cms"
+MATCH_GLIDEIN_Gatekeeper = "ce302.cern.ch:8443/cream-lsf-grid_cms"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "CERN"
+UserLog = "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM#85bfee36-3b82-11e5-be34-001e67abf518"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 2
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1439964847
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.53"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/log/cmsRun_53.log.tar.gz, srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/stepB_MC_53.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "stepB_MC.root" }
+AutoClusterId = 16275
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439209593
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 2
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "CERN"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_KR_KNU"
+ClusterId = 1233705
+BytesSent = 119952.0
+CRAB_PublishName = "crab_25ns_WJetsToLNu_HT600_800-9da7f68dc2032d8626d7e7822bb10506"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/user/kbutanov.03af76ad04ddc195ee96e6a5469f1bbb1777390d/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_CH_CERN"
+MATCH_GLIDEIN_MaxMemMBs = 2800
+RequestMemory = 2000
+EnteredCurrentStatus = 1440131525
+MATCH_GLIDEIN_SiteWMS = "LSF"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/68/uscms5111/150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "689255460"
+CRAB_JobSW = "CMSSW_7_4_7"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 2800
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "Unknown"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440131525
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440564896
+NiceUser = false
+RootDir = "/"
+CommittedTime = 165965
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "LSF"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 33352
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SubmitEventNotes = "DAG Node: Job4"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 28513.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "2561111"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 8
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 3750000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.182.12"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5092137.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job4"
+LastPublicClaimId = "<129.93.182.12:42491>#1440048812#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 616.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1148372
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_long"
+LastJobLeaseRenewal = 1440115142
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440115142
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "OSGGOC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235992.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 2561111
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081789
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440616411"
+MATCH_EXP_JOB_GLIDEIN_Factory = "OSGGOC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=4 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9A89CA60-69FC-E411-9661-0025905C42B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/34F8B66A-D4FB-E411-8F89-842B2B29273C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/7CE6B848-F5FB-E411-A605-0025905A60A8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9E842AA8-54FC-E411-8BC7-000F53273500.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/C44AD465-D4FB-E411-8704-002590200A40.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/90B6CB1B-07FD-E411-BD52-001E67397CBA.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/183FB65F-69FC-E411-A5A8-0025904B7C26.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/5A0A9A0E-EDFB-E411-B95F-00266CF330B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/3E3768F1-61FC-E411-B163-002618943956.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/08DB9DDE-F4FB-E411-9BC9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/CE293F9B-54FC-E411-83E8-AC853D9DACD3.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/A4479F5F-69FC-E411-B0B5-0025904C6378.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/0419455F-D4FB-E411-AEFA-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E6BD6C76-54FC-E411-A1F2-AC853D9DACD7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/AC15F863-F5FB-E411-8F07-002590DB9286.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/CC9B7EE2-F4FB-E411-BCD9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/5844575F-D4FB-E411-81F5-003048FFD732.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/6EC5205E-D4FB-E411-9885-001E67396BB7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/B63200E8-69FC-E411-B949-0025904C51FC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/14554A42-54FC-E411-86D2-0025905A605E.root\"] --runAndLumis=job_lumis_4.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.4"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 33360.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 4
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440616411
+ImageSize = 1750000
+JobCurrentStartDate = 1440081782
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081782
+LastMatchTime = 1440081782
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440651211"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins6@glidein.grid.iu.edu"
+SpooledOutputFiles = "jobReport.json.4"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.4"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 33360.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 33360.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_1936_57194584@red-d8n12.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 3661158
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "8"
+CRAB_localOutputFiles = "results.root=results_4.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1727056
+MATCH_EXP_Used_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red.unl.edu red.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.4"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_4.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_4.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235992
+BytesSent = 597241.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440115142
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5092137.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_6@red-d8n12.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440115142
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440651211
+NiceUser = false
+RootDir = "/"
+CommittedTime = 33360
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+
+MaxWallTimeMins_RAW = 1400
+StatsLifetimeStarter = 31968
+CRAB_SaveLogsFlag = 1
+JOB_GLIDEIN_ProcId = "$$(GLIDEIN_ProcId:Unknown)"
+StreamOut = false
+JOB_GLIDEIN_Entry_Name = "$$(GLIDEIN_Entry_Name:Unknown)"
+CRAB_ReqName = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+use_x509userproxy = true
+JOB_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CRAB_SiteBlacklist = {  }
+CRAB_UserRole = undefined
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+TaskType = "Job"
+NumRestarts = 0
+MATCH_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SubmitEventNotes = "DAG Node: Job3"
+x509UserProxyVOName = "cms"
+RecentBlockWriteKbytes = 0
+MATCH_GLIDEIN_Site = "Nebraska"
+RecentBlockReadKbytes = 0
+LocalUserCpu = 0.0
+RemoteUserCpu = 27257.0
+MATCH_GLIDEIN_Max_Walltime = 603000
+MATCH_EXP_JOB_GLIDEIN_ClusterId = "3043383"
+JOB_GLIDEIN_SiteWMS_Queue = "$$(GLIDEIN_SiteWMS_Queue:Unknown)"
+CRAB_StageoutPolicy = "local,remote"
+CRAB_Workflow = "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+CurrentHosts = 0
+RecentBlockWrites = 0
+MATCH_GLIDEIN_ProcId = 14
+x509UserProxyExpiration = 1440171330
+Iwd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0"
+MATCH_EXP_JOB_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+NumShadowStarts = 1
+JobPrio = 10
+DiskUsage = 4250000
+CRAB_ASOTimeout = 86400
+StartdPrincipal = "execute-side@matchsession/129.93.183.127"
+JOB_GLIDEIN_ToDie = "$$(GLIDEIN_ToDie:Unknown)"
+JobRunCount = 1
+MachineAttrSlotWeight0 = 1
+JOB_Site = "$$(GLIDEIN_Site:Unknown)"
+WantCheckpoint = false
+BlockWriteKbytes = 0
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId = "5096573.0"
+RequestDisk = 100000
+TotalSuspensions = 0
+DAGNodeName = "Job3"
+LastPublicClaimId = "<129.93.183.127:56441>#1440063351#7#..."
+RequestDisk_RAW = 1
+PeriodicRemove = ( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )
+JOBGLIDEIN_CMSSite = "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])"
+MATCH_GLIDEIN_CMSSite = "T2_US_Nebraska"
+RemoteSysCpu = 621.0
+CRAB_Retry = 3
+MyType = "Job"
+CRAB_JobType = "analysis"
+PeriodicHold = false
+ResidentSetSize_RAW = 1174388
+JOB_GLIDEIN_Job_Max_Time = "$$(GLIDEIN_Job_Max_Time:Unknown)"
+EnvDelim = ";"
+MATCH_EXP_JOB_GLIDEIN_Memory = "2500"
+CRAB_RestHost = "cmsweb.cern.ch"
+Owner = "uscms3850"
+JOB_GLIDEIN_SiteWMS_JobId = "$$(GLIDEIN_SiteWMS_JobId:Unknown)"
+MATCH_GLIDEIN_Entry_Name = "CMS_T2_US_Nebraska_Red_gw1_long"
+LastJobLeaseRenewal = 1440113502
+MATCH_EXP_JOB_GLIDEIN_CMSSite = "T2_US_Nebraska"
+CRAB_AdditionalOutputFiles = {  }
+OnExitHold = false
+CRAB_ASOURL = "https://cmsweb.cern.ch/couchdb"
+MATCH_EXP_JOB_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+CRAB_NumAutomJobRetries = 2
+AccountingGroup = "analysis.mrodozov"
+MATCH_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+WantRemoteSyscalls = false
+ExitStatus = 0
+User = "uscms3850@cms"
+JobLeaseDuration = 1200
+MATCH_GLIDEIN_SEs = "srm.unl.edu"
+JOB_Gatekeeper = ifthenelse(substr(Used_Gatekeeper,0,1) =!= "$",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,"Unknown"))
+MATCH_Memory = 2500
+DESIRED_OpSyses = "LINUX"
+CompletionDate = 1440113503
+WhenToTransferOutput = "ON_EXIT_OR_EVICT"
+RequestCpus = 1
+ExecutableSize = 7
+x509UserProxyFirstFQAN = "/cms/Role=NULL/Capability=NULL"
+CommittedSuspensionTime = 0
+PreJobPrio1 = 0
+MATCH_GLIDEIN_Factory = "SDSC"
+GlobalJobId = "crab3-1@submit-5.t2.ucsd.edu#1235991.0#1440081300"
+CRAB_ISB = "https://cmsweb.cern.ch/crabcache"
+StreamErr = false
+TerminationPending = true
+DAGManNodesLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log"
+Rank = 0.0
+JOB_GLIDEIN_SiteWMS = "$$(GLIDEIN_SiteWMS:Unknown)"
+TransferInput = "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz"
+JobUniverse = 5
+MATCH_GLIDEIN_ClusterId = 3043383
+PeriodicRelease = ( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )
+MATCH_EXP_JOB_GLIDEIN_Job_Max_Time = "34800"
+JobCurrentStartExecutingDate = 1440081533
+CRAB_oneEventMode = 0
+x509userproxy = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9"
+MATCH_EXP_JOB_GLIDEIN_ToRetire = "1440630710"
+MATCH_EXP_JOB_GLIDEIN_Factory = "SDSC"
+JOB_GLIDEIN_SEs = "$$(GLIDEIN_SEs:Unknown)"
+JobNotification = 0
+CRAB_DBSURL = "https://cmsweb.cern.ch/dbs/prod/global/DBSReader"
+ProcId = 0
+JOB_GLIDEIN_MaxMemMBs = "$$(GLIDEIN_MaxMemMBs:Unknown)"
+MATCH_GLIDECLIENT_Name = "CMSG-v1_0.overflow"
+Used_Gatekeeper = "$$(GLIDEIN_Gatekeeper:Unknown)"
+CondorVersion = "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $"
+BlockReadKbytes = 0
+BytesRecvd = 44879356.0
+Arguments = "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=3 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/06DE7D5F-D4FB-E411-9C85-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E29E093E-54FC-E411-8AE5-0025905A60FE.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/F0FDF730-EDFB-E411-842B-00261834B51D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/FECCF363-F5FB-E411-85A3-002590DBDFE0.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/0E4CEBFE-ECFB-E411-9F0C-842B2B29273C.root\",' '\"/store/m
 c/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/ECF66DCC-F0FB-E411-84CF-00259074AE32.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/96F29C69-D4FB-E411-9028-842B2B292627.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/6E887F0F-EDFB-E411-875B-BCAEC54B303A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/0C788712-F5FB-E411-AA0E-AC853D9DAC29.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/723A41AE-F4FB-E411-BAA3-0025905C431A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/DA4EA0F5-F4FB-E411-B2AD-00259073E31C.root\",' '\"/store/mc/RunIISpring
 15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/98C8F097-F7FB-E411-9A1F-52540006FB8D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/DE4F8235-5FFC-E411-80CD-0025905A6088.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/EA5D6151-F5FB-E411-99F0-0026B92E0C74.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/205D0CF9-F4FB-E411-934D-000F532734AC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/24BCAED9-F0FB-E411-A35B-00259074AE54.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/A4C160C1-F4FB-E411-A66D-B083FED76C6C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt
 _300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E86B536C-54FC-E411-8787-AC853D9DACE1.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/2E68E42D-EDFB-E411-8027-001E67397CC9.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/A056D12B-EDFB-E411-9E51-52540006FDD6.root\"] --runAndLumis=job_lumis_3.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}"
+ShouldTransferFiles = "YES"
+Out = "job_out.3"
+JOB_GLIDEIN_Memory = "$$(Memory:Unknown)"
+NumJobMatches = 1
+CumulativeSlotTime = 31976.0
+OnExitRemove = true
+ResidentSetSize = 1250000
+SpoolOnEvict = false
+JOB_GLIDEIN_Max_Walltime = "$$(GLIDEIN_Max_Walltime:Unknown)"
+JobAdInformationAttrs = "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu"
+In = "/dev/null"
+LastJobStatus = 2
+CumulativeSuspensionTime = 0
+MemoryUsage = ( ( ResidentSetSize + 1023 ) / 1024 )
+MATCH_EXP_JOB_CMSSite = "T2_US_Nebraska"
+CRAB_TaskWorker = "vocms052"
+OrigMaxHosts = 1
+TransferIn = false
+CRAB_Id = 3
+JOB_GLIDEIN_Name = "$$(GLIDEIN_Name:Unknown)"
+WantRemoteIO = true
+MATCH_EXP_JOB_GLIDEIN_MaxMemMBs = "2500"
+MATCH_GLIDEIN_ToRetire = 1440630710
+ImageSize = 2000000
+JobCurrentStartDate = 1440081527
+ExecutableSize_RAW = 6
+x509userproxysubject = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+NumJobStarts = 1
+DESIRED_Overflow_Region = regexps("T[12]_US_",DESIRED_Sites,"US")
+AutoClusterAttrs = "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements"
+Cmd = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh"
+BlockReads = 0
+JobStartDate = 1440081527
+LastMatchTime = 1440081527
+MATCH_EXP_JOB_GLIDEIN_ToDie = "1440665510"
+JOB_GLIDEIN_CMSSite = "$$(GLIDEIN_CMSSite:Unknown)"
+CoreSize = -1
+MATCH_EXP_JOB_GLIDEIN_Schedd = "schedd_glideins5@gfactory-1.t2.ucsd.edu"
+SpooledOutputFiles = "jobReport.json.3"
+TargetType = "Machine"
+TransferOutput = "jobReport.json.3"
+job_ad_information_attrs = MATCH_GLIDEIN_Gatekeeper
+CommittedSlotTime = 31976.0
+JobStatus = 4
+x509UserProxyEmail = "mircho.nikolaev.rodozov@cern.ch"
+DAGManJobId = 1183604
+RemoteWallClockTime = 31976.0
+NumSystemHolds = 0
+CRAB_UserDN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov"
+LastRemoteHost = "glidein_11321_920434792@red-d23n7.unl.edu"
+MATCH_EXP_JOB_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_Site = "$$(GLIDEIN_Site:Unknown)"
+AcctGroup = "analysis"
+Requirements = ( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= "rhel6" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == "X86_64" ) && ( TARGET.OpSys == "LINUX" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )
+CRAB_EDMOutputFiles = {  }
+RecentBlockReads = 0
+DESIRED_SITES = "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL"
+NumCkpts = 0
+CMS_ALLOW_OVERFLOW = "True"
+RequestMemory_RAW = 2000
+DiskUsage_RAW = 4111436
+DAGManNodesMask = "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27"
+MATCH_EXP_JOB_GLIDEIN_ProcId = "14"
+CRAB_localOutputFiles = "results.root=results_3.root"
+MaxHosts = 1
+CRAB_UserHN = "mrodozov"
+MATCH_EXP_JOB_GLIDEIN_Max_Walltime = "603000"
+MATCH_EXP_JOB_GLIDEIN_SEs = "srm.unl.edu"
+JOB_GLIDEIN_SiteWMS_Slot = "$$(GLIDEIN_SiteWMS_Slot:Unknown)"
+CRAB_InputData = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM"
+CondorPlatform = "$CondorPlatform: X86_64-RedHat_6.6 $"
+BlockWrites = 0
+AcctGroupUser = "uscms3850"
+MATCH_GLIDEIN_Job_Max_Time = 34800
+ImageSize_RAW = 1756756
+MATCH_EXP_Used_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+JOB_GLIDECLIENT_Name = "$$(GLIDECLIENT_Name:Unknown)"
+LocalSysCpu = 0.0
+LastSuspensionTime = 0
+MATCH_GLIDEIN_SiteWMS_Queue = "red-gw1.unl.edu"
+MATCH_GLIDEIN_Gatekeeper = "red-gw1.unl.edu red-gw1.unl.edu:9619"
+RecentStatsLifetimeStarter = 1200
+MATCH_EXP_JOB_GLIDEIN_Site = "Nebraska"
+UserLog = "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log"
+CRAB_TransferOutputs = 1
+CRAB_DataBlock = "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c"
+Env = "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local"
+CRAB_BlacklistT1 = 0
+JOB_GLIDEIN_Factory = "$$(GLIDEIN_Factory:Unknown)"
+TransferInputSizeMB = 42
+MachineAttrCpus0 = 1
+CRAB_RestURInoAPI = "/crabserver/prod"
+CRAB_JobArch = "slc6_amd64_gcc491"
+QDate = 1440081300
+CRAB_PublishGroupName = 0
+CRAB_PublishDBSURL = "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter"
+x509UserProxyFQAN = "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL"
+Err = "job_err.3"
+CRAB_SiteWhitelist = {  }
+CRAB_Destination = "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_3.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_3.root"
+CRAB_RetryOnASOFailures = 1
+CRAB_TFileOutputFiles = { "results.root" }
+AutoClusterId = 16278
+ExitCode = 0
+accounting_group = analysis
+PostJobPrio1 = -1439550850
+ExitBySignal = false
+CRAB_UserGroup = undefined
+PostJobPrio2 = 3
+PeriodicRemoveReason = ifThenElse(MemoryUsage > RequestMemory,"Removed due to memory use",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,"Removed due to wall clock limit",ifThenElse(DiskUsage > 100000000,"Removed due to disk usage",ifThenElse(time() > x509UserProxyExpiration,"Removed job due to proxy expiration","Removed due to job being held"))))
+MATCH_EXP_JOB_Site = "Nebraska"
+BufferBlockSize = 32768
+CRAB_AsyncDest = "T2_CH_CERN"
+ClusterId = 1235991
+BytesSent = 604821.0
+CRAB_PublishName = "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac"
+CRAB_Publish = 1
+CRAB_Dest = "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316"
+MATCH_EXP_JOBGLIDEIN_CMSSite = "T2_US_Nebraska"
+MATCH_GLIDEIN_MaxMemMBs = 2500
+RequestMemory = 2000
+EnteredCurrentStatus = 1440113503
+MATCH_GLIDEIN_SiteWMS = "HTCondor"
+CRAB_UserWebDir = "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8"
+JOB_GLIDEIN_ToRetire = "$$(GLIDEIN_ToRetire:Unknown)"
+MATCH_GLIDEIN_SiteWMS_JobId = "5096573.0"
+CRAB_JobSW = "CMSSW_7_4_7_patch2"
+BufferSize = 524288
+JOB_GLIDEIN_Schedd = "$$(GLIDEIN_Schedd:Unknown)"
+MaxWallTimeMins = 1400
+LeaveJobInQueue = false
+MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot = "slot1_32@red-d23n7.unl.edu"
+EncryptExecuteDirectory = false
+NumCkpts_RAW = 0
+DESIRED_Archs = "X86_64"
+JobFinishedHookDone = 1440113503
+DESIRED_OpSysMajorVers = "6"
+MinHosts = 1
+MATCH_GLIDEIN_Name = "gfactory_instance"
+JOB_GLIDEIN_ClusterId = "$$(GLIDEIN_ClusterId:Unknown)"
+MATCH_GLIDEIN_ToDie = 1440665510
+NiceUser = false
+RootDir = "/"
+CommittedTime = 31976
+MATCH_EXP_JOB_GLIDEIN_SiteWMS = "HTCondor"
+


[19/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser2/classad-parser2.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser2/classad-parser2.1.adm b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser2/classad-parser2.1.adm
new file mode 100644
index 0000000..56abc61
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/external-library/classad-parser2/classad-parser2.1.adm
@@ -0,0 +1,5 @@
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1206367.0#1439615565", "CRAB_UserGroup": "undefined", "JobStartDate": 1439615574, "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=ferencek/CN=650164/CN=Dinko Ferencek", "JobStatus": 4, "CRAB_TFileOutputFiles": "{  }", "LeaveJobInQueue": "JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )", "DAG_InRecovery": 0, "AutoClusterId": 10378, "CRAB_TaskWorker": "vocms052", "OnExitRemove": "( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )", "MaxWallTimeMins_RAW": 1315, "JobCurrentStartDate": 1439615574, "CRAB_ASOTimeout": 86400, "CoreSize": -1, "CRAB_AsyncDest": "T3_US_FNALLPC", "StageInFinish": 1439615572, "ExitStatus": 0, "ReleaseReason": "Data files spooled", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "x509userproxyexpiration": 1440294044, "CurrentTime": "time()", "X509UserProxy": "3a7798796bc24a800001338
 917ec45991bcf0a96", "WantCheckpoint": false, "RemoteWallClockTime": 158333.0d, "In": "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "DiskUsage_RAW": 1, "DAG_Status": 0, "SUBMIT_x509userproxy": "/data/certs/creds/3a7798796bc24a800001338917ec45991bcf0a96", "EnteredCurrentStatus": 1439773907, "CRAB_RestURInoAPI": "/crabserver/prod", "HoldKillSig": "SIGUSR1", "RequestDisk": "DiskUsage", "MyType": "Job", "PeriodicRemove": "( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )", "RemoveKillSig": "SIGUSR1", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "dag_bootstrap_startup.sh", "CondorVersion": "$CondorVersion: 8.3.1 Jun 19 2015 $", "DAG_NodesReady": 0, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "CRAB_Workflow": "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "CRAB_UserRole": "undefined", "RemoteUserCpu
 ": 0.0d, "NiceUser": false, "CRAB_AlgoArgs": "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_on_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"lheInputFiles\": true, \"splitOnRun\": false, \"events_per_job\": 50000}, \"halt_job_on_file_boundaries\": false}", "Out": "_condor_stdout", "ImageSize_RAW": 100, "DAG_NodesPostrun": 0, "CRAB_JobArch": "slc6_amd64_gcc481", "CumulativeSuspensionTime": 0, "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_Requ
 estCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements,DiskUsage", "LastHoldReasonCode": 16, "NumCkpts": 0, "CRAB_BlacklistT1": 0, "Err": "_condor_stderr", "JobFinishedHookDone": 1439773907, "RequestMemory_RAW": 2000, "TransferOutputRemaps": "undefined", "ProcId": 0, "ImageSize": 100, "JobUniverse": 7, "DAG_NodesTotal": 30, "CRAB_JobType": "analysis", "SUBMIT_Iwd": "/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB", "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "OnExitHold": "( ExitCode =!= undefined && ExitCode != 0 )", "OrigMaxHosts": 1, "RequestMemory": 2000, "NumJobStarts": 1, "CRAB_UserHN": "ferencek", "LastHoldReason": "Spooling input data files", "TotalSuspensions": 0, "CRAB_FailedNodeLimit": -1, "ExitCode": 0, "CRAB_PublishName": "LHE-17521057f93ed9cadf21dd45b3505145", "CRAB_UserWebDir": "http://sub
 mit-5.t2.ucsd.edu/CSstoragePath/67/uscms5050/150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE", "JobNotification": 0, "CRAB_DashboardTaskType": "analysis", "SUBMIT_TransferOutputRemaps": "_condor_stdout=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.out;_condor_stderr=/data/srv/tmp/_150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHEF9C8tB/request.err", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "LocalUserCpu": 0.0d, "BufferBlockSize": 32768, "LastJobStatus": 2, "CommittedTime": 0, "CRAB_SaveLogsFlag": 0, "LastSuspensionTime": 0, "TaskType": "ROOT", "DAG_NodesDone": 30, "CumulativeSlotTime": 158333.0d, "TransferOutput": "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001", "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "OtherJobRemoveRequirements": "DAGManJobId =?= ClusterId", "CondorPlatform": "$C
 ondorPlatform: X86_64-ScientificLinux_6.6 $", "PeriodicRelease": false, "JobRunCount": 1, "CRAB_Publish": 1, "JobPrio": 10, "CRAB_TransferOutputs": 1, "CRAB_Attempt": 0, "LocalSysCpu": 0.0d, "RemoteSysCpu": 0.0d, "TransferInput": "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh, cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz", "PeriodicHold": false, "CRAB_NumAutomJobRetries": 2, "CRAB_LumiMask": "{}", "CRAB_InputData": "/Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8", "WantRemoteIO": true, "CommittedSuspensionTime": 0, "CRAB_JobSW": "CMSSW_7_1_18", "StageInStart": 1439615569, "CRAB_SiteWhitelist": "{ \"T3_US_FNALLPC\",\"T2_US_Purdue\",\"T2_US_Nebraska\" }", "CompletionDate": 1439773907, "StreamErr": false, "CRAB_RestHost": "cmsweb.cern.ch", "RemoteCondorSetup": "", "CRAB_ReqName": "150815_044810:ferencek_crab_Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13T
 eV-madgraph-pythia8_LHE", "DAG_NodesPrerun": 0, "WantRemoteSyscalls": false, "DAG_NodesQueued": 0, "DAG_NodesUnready": 0, "Owner": "uscms5050", "Requirements": "true || false && TARGET.OPSYS == \"LINUX\" && TARGET.ARCH == \"X86_64\" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory", "CRAB_JobCount": 30, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "CRAB_SplitAlgo": "EventBased", "DiskUsage": 1, "CRAB_MaxPost": 20, "ClusterId": 1206367, "BufferSize": 524288, "DAG_NodesFailed": 0, "MaxWallTimeMins": 1400, "CRAB_PublishGroupName": 0, "CommittedSlotTime": 0, "CRAB_SiteBlacklist": "{  }", "Args": "RunJobs.dag", "CRAB_EDMOutputFiles": "{ \"Stop2ToStop1H_Stop1M200_TuneCUETP8M1_13TeV-madgraph-pythia8_LHE.root\" }", "Environment": "strcat(\"PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=\",ClusterId,\".\",ProcId,\" CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local\")", "CRAB_UserVO": "cms", "Iwd": "/data/condor_local/spool/6367/0/cl
 uster1206367.proc0.subproc0", "QDate": 1439615565, "CurrentHosts": 0, "User": "uscms5050@cms", "StreamOut": false }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1217455.0#1439764883", "CRAB_UserGroup": "dcms", "JobStartDate": 1439764892, "CRAB_UserDN": "/C=DE/O=GermanGrid/OU=KIT/CN=Andrew Gilbert", "JobStatus": 4, "CRAB_TFileOutputFiles": "{  }", "LeaveJobInQueue": "JobStatus == 4 && ( CompletionDate =?= UNDDEFINED || CompletionDate == 0 || ( ( time() - CompletionDate ) < 864000 ) )", "DAG_InRecovery": 0, "AutoClusterId": 10378, "CRAB_TaskWorker": "vocms052", "OnExitRemove": "( ExitSignal =?= 11 || ( ExitCode =!= undefined && ExitCode >= 0 && ExitCode <= 2 ) )", "MaxWallTimeMins_RAW": 1315, "JobCurrentStartDate": 1439764892, "CRAB_ASOTimeout": 86400, "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "StageInFinish": 1439764891, "ExitStatus": 0, "ReleaseReason": "Data files spooled", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "CurrentTime": "time()", "X509UserProxy": "63f0c4d862d8b4e4ddcfd29ed85b6b5899660759", "WantCheckpoint": false, "RemoteWallClockTime": 82427.0d, "In": 
 "/dev/null", "MaxHosts": 1, "RootDir": "/", "NumRestarts": 0, "DiskUsage_RAW": 1, "DAG_Status": 0, "SUBMIT_x509userproxy": "/data/certs/creds/63f0c4d862d8b4e4ddcfd29ed85b6b5899660759", "EnteredCurrentStatus": 1439847319, "CRAB_RestURInoAPI": "/crabserver/prod", "HoldKillSig": "SIGUSR1", "RequestDisk": "DiskUsage", "MyType": "Job", "PeriodicRemove": "( JobStatus == 5 ) && ( time() - EnteredCurrentStatus > 30 * 86400 )", "RemoveKillSig": "SIGUSR1", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "dag_bootstrap_startup.sh", "CondorVersion": "$CondorVersion: 8.3.1 Jun 19 2015 $", "DAG_NodesReady": 0, "CRAB_AdditionalOutputFiles": "{ \"combine_output.tar\" }", "ShouldTransferFiles": "YES", "CRAB_Workflow": "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "TargetType": "Machine", "MinHosts": 1, "NumCkpts_RAW": 0, "RequestCpus": 1, "CRAB_UserRole": "undefined", "RemoteUserCpu": 0.0d, "NiceUser": false, "CRAB_AlgoArgs": "{\"splitOnRun\": false, \"events_per_job\": {\"halt_job_o
 n_file_boundaries\": false, \"events_per_lumi\": 100, \"algorithm\": \"EventBased\", \"applyLumiCorrection\": true, \"runs\": [], \"lumis\": [], \"splitOnRun\": false, \"events_per_job\": 1}, \"halt_job_on_file_boundaries\": false}", "Out": "_condor_stdout", "ImageSize_RAW": 100, "DAG_NodesPostrun": 0, "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,N
 iceUser,Rank,Requirements,DiskUsage", "LastHoldReasonCode": 16, "NumCkpts": 0, "CRAB_BlacklistT1": 0, "Err": "_condor_stderr", "JobFinishedHookDone": 1439847319, "RequestMemory_RAW": 2000, "TransferOutputRemaps": "undefined", "ProcId": 0, "ImageSize": 100, "JobUniverse": 7, "DAG_NodesTotal": 25, "CRAB_JobType": "analysis", "SUBMIT_Iwd": "/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9", "WhenToTransferOutput": "ON_EXIT", "ExitBySignal": false, "OnExitHold": "( ExitCode =!= undefined && ExitCode != 0 )", "OrigMaxHosts": 1, "RequestMemory": 2000, "NumJobStarts": 1, "CRAB_UserHN": "agilbert", "LastHoldReason": "Spooling input data files", "TotalSuspensions": 0, "CRAB_FailedNodeLimit": -1, "ExitCode": 0, "CRAB_PublishName": "prefit_cms_asimov_A1_5D-59ffde2b5d41be5f0c401d0a6a8a0194", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/73/uscms5616/150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "JobNotification": 0, "CRAB_DashboardTaskType": "anal
 ysis", "SUBMIT_TransferOutputRemaps": "_condor_stdout=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.out;_condor_stderr=/data/srv/tmp/_150816_222636:agilbert_crab_prefit_cms_asimov_A1_5DYpFxP9/request.err", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "LocalUserCpu": 0.0d, "BufferBlockSize": 32768, "LastJobStatus": 2, "CommittedTime": 0, "CRAB_SaveLogsFlag": 0, "LastSuspensionTime": 0, "TaskType": "ROOT", "DAG_NodesDone": 25, "CumulativeSlotTime": 82427.0d, "TransferOutput": "RunJobs.dag.dagman.out, RunJobs.dag.rescue.001", "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "OtherJobRemoveRequirements": "DAGManJobId =?= ClusterId", "CondorPlatform": "$CondorPlatform: X86_64-ScientificLinux_6.6 $", "PeriodicRelease": false, "JobRunCount": 1, "CRAB_Publish": 0, "JobPrio": 10, "CRAB_TransferOutputs": 1, "CRAB_Attempt": 0, "LocalSysCpu": 0.0d, "RemoteSysCpu": 0.0d, "TransferInput": "gWMS-CMSRunAnalysis.sh, CMSRunAnalysis.sh,
  cmscp.py, RunJobs.dag, Job.submit, dag_bootstrap.sh, AdjustSites.py, site.ad, site.ad.json, run_and_lumis.tar.gz, sandbox.tar.gz, CMSRunAnalysis.tar.gz, TaskManagerRun.tar.gz", "PeriodicHold": false, "CRAB_NumAutomJobRetries": 2, "CRAB_LumiMask": "{}", "CRAB_InputData": "/MinBias", "WantRemoteIO": true, "CommittedSuspensionTime": 0, "CRAB_JobSW": "CMSSW_7_4_0_pre9", "StageInStart": 1439764886, "CRAB_SiteWhitelist": "{  }", "CompletionDate": 1439847319, "StreamErr": false, "CRAB_RestHost": "cmsweb.cern.ch", "RemoteCondorSetup": "", "CRAB_ReqName": "150816_222636:agilbert_crab_prefit_cms_asimov_A1_5D", "DAG_NodesPrerun": 0, "WantRemoteSyscalls": false, "DAG_NodesQueued": 0, "DAG_NodesUnready": 0, "Owner": "uscms5616", "Requirements": "true || false && TARGET.OPSYS == \"LINUX\" && TARGET.ARCH == \"X86_64\" && TARGET.HasFileTransfer && TARGET.Disk >= RequestDisk && TARGET.Memory >= RequestMemory", "CRAB_JobCount": 25, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "CRAB_SplitAlgo": "
 EventBased", "DiskUsage": 1, "CRAB_MaxPost": 20, "ClusterId": 1217455, "BufferSize": 524288, "DAG_NodesFailed": 0, "MaxWallTimeMins": 1400, "CRAB_PublishGroupName": 0, "CommittedSlotTime": 0, "CRAB_SiteBlacklist": "{ \"T2_FR_CCIN2P3\",\"T1_IT_CNAF\",\"T1_ES_PIC\",\"T1_UK_RAL\",\"T2_FI_HIP\",\"T2_US_Nebraska\" }", "Args": "RunJobs.dag", "CRAB_EDMOutputFiles": "{  }", "Environment": "strcat(\"PATH=/usr/bin:/bin CRAB3_VERSION=3.3.0-pre1 CONDOR_ID=\",ClusterId,\".\",ProcId,\" CRAB_RUNTIME_TARBALL=local CRAB_TASKMANAGER_TARBALL=local\")", "CRAB_UserVO": "cms", "Iwd": "/data/condor_local/spool/7455/0/cluster1217455.proc0.subproc0", "QDate": 1439764883, "CurrentHosts": 0, "User": "uscms5616@cms", "StreamOut": false }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1233705.0#1439964847", "PostJobPrio1": -1439209593, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.main", "PostJobPrio2": 2, "JobStartDate": 1439965560, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2800", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_KR_KNU", "MATCH_EXP_JOB_Site": "CERN", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 165965.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T1_DE_KIT,T2_UK_London_IC,T2_CH_CERN", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_CH_CERN", "RootDir": "/", "JOB_GLIDEIN_ToDie": "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/user/kbutanov.0
 3af76ad04ddc195ee96e6a5469f1bbb1777390d/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440131525, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_CH_CERN", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "LSF", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 59069, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "ce302.cern.ch:8443/cream-lsf-grid_cms", "CRAB_Workflow": "150810_122536:kbutanov_crab_25ns_
 WJetsToLNu_HT600_800", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 163084.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins3@cmsgwms-factory.fnal.gov", "BytesSent": 119952.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins3@cmsgwms-factory.fnal.gov", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements", "MATCH_GLIDEIN_SiteWMS_Queue": "grid_cms", "NumCkpts": 0, "JobFinishedH
 ookDone": 1440131525, "ImageSize": 4250000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440530096, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1439965560, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_CH_CERN", "MATCH_EXP_Used_Gatekeeper": "ce302.cern.ch:8443/cream-lsf-grid_cms", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_CH_CERN_ce302", "TerminationPending": true, "CRAB_UserHN": "kbutanov", "BlockReads": 0, "DAGManJobId": 1035690, "MATCH_GLIDEIN_SEs": "srm-eoscms.cern.ch", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509UserProxyExpiration,\"Removed job due to proxy expiration\",\"Removed due to job being held\")
 )))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_CH_CERN_ce302", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440530096", "LastJobLeaseRenewal": 1440131524, "AcctGroupUser": "uscms5111", "MATCH_EXP_JOB_GLIDEIN_Factory": "gfactory_service", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.53", "x509UserProxyEmail": "khakimjan.butanov@cern.ch", "CRAB_localOutputFiles": "stepB_MC.root=stepB_MC_53.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "gfactory_service", "accounting_group": "analysis", "DAGNodeName": "Job53", "PeriodicRelease": "( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode
  == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "59069", "MATCH_GLIDEIN_MaxMemMBs": 2800, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM#85bfee36-3b82-11e5-be34-001e67abf518", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESIRED_OpSyses": "LINUX", "DAGManNodesLog": "/data/condor_local/spool/5690/0/clust
 er1035690.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms5111", "PreJobPrio1": 1, "DiskUsage": 75000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 2800, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_CH_CERN", "Iwd": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 165949, "SubmitEventNotes": "DAG Node: Job53", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov", "CRAB_TFileOutputFil
 es": "{ \"stepB_MC.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16275, "StartdPrincipal": "execute-side@matchsession/128.142.45.103", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2800, "MATCH_GLIDEIN_SiteWMS_Slot": "Unknown", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 2800, "JobCurrentStartDate": 1439965560, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/LatinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/log/cmsRun_53.log.tar.gz, srm://cluster142.knu.ac.kr:8443/srm/managerv2?SFN=/pnfs/knu.ac.kr/data/cms/store/user/kbutanov/HWWwidthRun2/La
 tinoTrees_V4/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/crab_25ns_WJetsToLNu_HT600_800/150810_122536/0000/stepB_MC_53.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440397268, "x509userproxy": "/data/condor_local/spool/5690/0/cluster1035690.proc0.subproc0/8123da6528ec4abd24562a99b4f2b0ec556bed0b", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "LSF", "NumRestarts": 0, "DiskUsage_RAW": 61434, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "689255460", "ResidentSetSize_RAW": 1238992, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "/data/condor_local/spool/5690/0/cluster1035690.proc0
 .subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.53", "ImageSize_RAW": 4095188, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2800", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "1", "CRAB_BlacklistT1": 0, "Err": "job_err.53", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 2, "NumJobReconnects": 2, "SpooledOutputFiles": "jobReport.json.53", "MATCH_GLIDEIN_Site": "CERN", "BlockWriteKbytes": 0, "SpoolOnEvict": false, "WhenToTransferOutput":
  "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.main", "JobCurrentStartExecutingDate": 1439965573, "MATCH_GLIDEIN_ProcId": 1, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_25ns_WJetsToLNu_HT600_800-9da7f68dc2032d8626d7e7822bb10506", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/68/uscms5111/150810_122536:kbutanov_crab_25ns_WJetsToLNu_HT600_800", "BlockReadKbytes": 0, "AccountingGroup": "analysis.kbutanov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440564896", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "CERN", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 165965, "CRAB_Retry": 2, "LastSuspensionTime": 0, "MATCH_EXP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "Cum
 ulativeSlotTime": 165965.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 2128005.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "Unknown", "JobRunCount": 1, "LastRemoteHost": "glidein_9757_931570227@b635ef6906.cern.ch", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "689255460", "RemoteSysCpu": 1963.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v2/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7", "DAGParentNodeNames": "", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)", "CompletionDate": 1440131525, "CRAB_RestHost": "cmsweb.cern.ch", "MATCH_EXP_
 JOB_GLIDEIN_SiteWMS_Queue": "grid_cms", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=kbutanov/CN=727362/CN=Khakimjan Butanov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1233705, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 165965.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440564896, "LastPublicClaimId": "<128.142.45.103:55332>#1439963327#3#...", "CurrentHosts": 0, "QDate": 1439964847, "Arguments": "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=53 
 --cmsswVersion=CMSSW_7_4_7 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/WJetsToLNu_HT-600To800_TuneCUETP8M1_13TeV-madgraphMLM-pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v2/50000/6E2F932B-633B-E511-A7AE-F04DA23BCE4C.root\"] --runAndLumis=job_lumis_53.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm-eoscms.cern.ch", "CRAB_Id": 53, "User": "uscms5111@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1235991.0#1440081300", "PostJobPrio1": -1439550850, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "PostJobPrio2": 3, "JobStartDate": 1440081527, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2500", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "MATCH_EXP_JOB_Site": "Nebraska", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 31976.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_US_Nebraska", "RootDir": "/", "JOB_GLIDEIN_ToDie": 
 "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440113503, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_US_Nebraska", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "HTCondor", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 3043383, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "red-gw1.unl.edu red-gw1.u
 nl.edu:9619", "CRAB_Workflow": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 27257.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins5@gfactory-1.t2.ucsd.edu", "BytesSent": 604821.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins5@gfactory-1.t2.ucsd.edu", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requireme
 nts", "MATCH_GLIDEIN_SiteWMS_Queue": "red-gw1.unl.edu", "NumCkpts": 0, "JobFinishedHookDone": 1440113503, "ImageSize": 2000000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440630710, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1440081527, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_US_Nebraska", "MATCH_EXP_Used_Gatekeeper": "red-gw1.unl.edu red-gw1.unl.edu:9619", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_gw1_long", "TerminationPending": true, "CRAB_UserHN": "mrodozov", "BlockReads": 0, "DAGManJobId": 1183604, "MATCH_GLIDEIN_SEs": "srm.unl.edu", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509Use
 rProxyExpiration,\"Removed job due to proxy expiration\",\"Removed due to job being held\"))))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_gw1_long", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440630710", "LastJobLeaseRenewal": 1440113502, "AcctGroupUser": "uscms3850", "MATCH_EXP_JOB_GLIDEIN_Factory": "SDSC", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.3", "x509UserProxyEmail": "mircho.nikolaev.rodozov@cern.ch", "CRAB_localOutputFiles": "results.root=results_3.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "SDSC", "accounting_group": "analysis", "DAGNodeName": "Job3", "PeriodicRele
 ase": "( HoldReasonCode == 28 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "3043383", "MATCH_GLIDEIN_MaxMemMBs": 2500, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESI
 RED_OpSyses": "LINUX", "DAGManNodesLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms3850", "PreJobPrio1": 0, "DiskUsage": 4250000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 1400, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_US_Nebraska", "Iwd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 31968, "SubmitEventNotes": "DAG Node: Job3", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organ
 ic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "CRAB_TFileOutputFiles": "{ \"results.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16278, "StartdPrincipal": "execute-side@matchsession/129.93.183.127", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2500, "MATCH_GLIDEIN_SiteWMS_Slot": "slot1_32@red-d23n7.unl.edu", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 1400, "JobCurrentStartDate": 1440081527, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_3.log.tar.gz, srm://srm-eoscms.cern.ch:
 8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_3.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440171330, "x509userproxy": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "HTCondor", "NumRestarts": 0, "DiskUsage_RAW": 4111436, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "5096573.0", "ResidentSetSize_RAW": 1174388, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.
 ch/crabcache", "Cmd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.3", "ImageSize_RAW": 1756756, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2500", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "14", "CRAB_BlacklistT1": 0, "Err": "job_err.3", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 42, "SpooledOutputFiles": "jobReport.json.3", "MATCH_GLIDEIN_Site": "Nebraska", "BlockWriteKby
 tes": 0, "SpoolOnEvict": false, "WhenToTransferOutput": "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "JobCurrentStartExecutingDate": 1440081533, "MATCH_GLIDEIN_ProcId": 14, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "BlockReadKbytes": 0, "AccountingGroup": "analysis.mrodozov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440665510", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "Nebraska", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 31976, "CRAB_Retry": 3,
  "LastSuspensionTime": 0, "MATCH_EXP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "CumulativeSlotTime": 31976.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 4.4879356E7d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "slot1_32@red-d23n7.unl.edu", "JobRunCount": 1, "LastRemoteHost": "glidein_11321_920434792@red-d23n7.unl.edu", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "5096573.0", "RemoteSysCpu": 621.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7_patch2", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)"
 , "CompletionDate": 1440113503, "CRAB_RestHost": "cmsweb.cern.ch", "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue": "red-gw1.unl.edu", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1235991, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 31976.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440665510, "LastPublicClaimId": "<129.93.183.127:56441>#1440063351#7#...", "CurrentHosts": 0, "QDate": 1440081300, "A
 rguments": "-a sandbox.tar.gz --sourceURL=https://cmsweb.cern.ch/crabcache --jobNumber=3 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/06DE7D5F-D4FB-E411-9C85-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E29E093E-54FC-E411-8AE5-0025905A60FE.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/F0FDF730-EDFB-E411-842B-00261834B51D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/FECCF363-F5FB-E411-85A3-002590DBDFE0.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/0E4CEBFE-ECFB-E411-9F0C-842B2B29273C.root\",' '\"/store/mc
 /RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/ECF66DCC-F0FB-E411-84CF-00259074AE32.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/96F29C69-D4FB-E411-9028-842B2B292627.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/6E887F0F-EDFB-E411-875B-BCAEC54B303A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/0C788712-F5FB-E411-AA0E-AC853D9DAC29.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/723A41AE-F4FB-E411-BAA3-0025905C431A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/DA4EA0F5-F4FB-E411-B2AD-00259073E31C.root\",' '\"/store/mc/RunIISpring1
 5DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/98C8F097-F7FB-E411-9A1F-52540006FB8D.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/DE4F8235-5FFC-E411-80CD-0025905A6088.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/EA5D6151-F5FB-E411-99F0-0026B92E0C74.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/205D0CF9-F4FB-E411-934D-000F532734AC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/24BCAED9-F0FB-E411-A35B-00259074AE54.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/A4C160C1-F4FB-E411-A66D-B083FED76C6C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_
 300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E86B536C-54FC-E411-8787-AC853D9DACE1.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/2E68E42D-EDFB-E411-8027-001E67397CC9.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/A056D12B-EDFB-E411-9E51-52540006FDD6.root\"] --runAndLumis=job_lumis_3.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm.unl.edu", "CRAB_Id": 3, "User": "uscms3850@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }
+{ "GlobalJobId": "crab3-1@submit-5.t2.ucsd.edu#1235992.0#1440081300", "PostJobPrio1": -1439550850, "MATCH_EXP_JOB_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "PostJobPrio2": 3, "JobStartDate": 1440081782, "CRAB_UserGroup": "undefined", "MATCH_EXP_JOB_GLIDEIN_MaxMemMBs": "2500", "MATCH_GLIDEIN_Name": "gfactory_instance", "JobStatus": 4, "JOB_GLIDEIN_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "CRAB_TaskWorker": "vocms052", "MATCH_GLIDEIN_Max_Walltime": 603000, "JOB_GLIDEIN_SiteWMS": "$$(GLIDEIN_SiteWMS:Unknown)", "CoreSize": -1, "CRAB_AsyncDest": "T2_CH_CERN", "MATCH_EXP_JOB_Site": "Nebraska", "Rank": 0.0d, "JOB_GLIDEIN_Memory": "$$(Memory:Unknown)", "WantCheckpoint": false, "RemoteWallClockTime": 33360.0d, "JOB_GLIDEIN_Name": "$$(GLIDEIN_Name:Unknown)", "DESIRED_SITES": "T2_US_UCSD,T2_DE_DESY,T2_CH_CSCS,T2_US_MIT,T2_IT_Legnaro,T2_UK_London_Brunel,T2_CH_CERN,T2_UK_London_IC,T3_CH_PSI,T1_UK_RAL", "MaxHosts": 1, "MATCH_EXP_JOB_CMSSite": "T2_US_Nebraska", "RootDir": "/", "JOB_GLIDEIN_ToDie": 
 "$$(GLIDEIN_ToDie:Unknown)", "RecentBlockWriteKbytes": 0, "CRAB_Dest": "/store/temp/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316", "JOB_GLIDEIN_SiteWMS_Queue": "$$(GLIDEIN_SiteWMS_Queue:Unknown)", "CRAB_RestURInoAPI": "/crabserver/prod", "EnteredCurrentStatus": 1440115142, "MATCH_EXP_JOBGLIDEIN_CMSSite": "T2_US_Nebraska", "RequestDisk": 100000, "MATCH_EXP_JOB_GLIDEIN_SiteWMS": "HTCondor", "MyType": "Job", "MATCH_GLIDEIN_ClusterId": 2561111, "PeriodicRemove": "( ( JobStatus =?= 5 ) && ( time() - EnteredCurrentStatus > 7 * 60 ) ) || ( ( JobStatus =?= 2 ) && ( ( MemoryUsage > RequestMemory ) || ( MaxWallTimeMins * 60 < time() - EnteredCurrentStatus ) || ( DiskUsage > 100000000 ) ) ) || ( ( JobStatus =?= 1 ) && ( time() > ( x509UserProxyExpiration + 86400 ) ) )", "CondorVersion": "$CondorVersion: 8.3.5 Apr 16 2015 BuildID: 315103 $", "MATCH_GLIDEIN_Gatekeeper": "red.unl.edu red.unl.edu:9
 619", "CRAB_Workflow": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "NumCkpts_RAW": 0, "MinHosts": 1, "RequestCpus": 1, "RemoteUserCpu": 28513.0d, "MATCH_EXP_JOB_GLIDEIN_Schedd": "schedd_glideins6@glidein.grid.iu.edu", "BytesSent": 597241.0d, "MATCH_GLIDEIN_Schedd": "schedd_glideins6@glidein.grid.iu.edu", "AutoClusterAttrs": "CheckpointPlatform,DESIRED_Gatekeepers,DESIRED_Sites,MaxWallTimeMins,RequestMemory,REQUIRED_OS,JobUniverse,LastCheckpointPlatform,NumCkpts,x509userproxyfirstfqan,x509userproxysubject,MachineLastMatchTime,DynamicSlot,PartitionableSlot,Slot1_ExpectedMachineGracefulDrainingCompletion,Slot1_JobStarts,Slot1_SelfMonitorAge,Slot1_TotalTimeClaimedBusy,Slot1_TotalTimeUnclaimedIdle,CMS_ALLOW_OVERFLOW,CRAB_UserRole,DESIRED_Overflow_Region,WMAgent_AgentName,CMSGroups,_condor_RequestCpus,_condor_RequestDisk,_condor_RequestMemory,RequestCpus,RequestDisk,WithinResourceLimits,opportunistic_job,ConcurrencyLimits,NiceUser,Rank,Requirements", "MATCH_G
 LIDEIN_SiteWMS_Queue": "red.unl.edu", "NumCkpts": 0, "JobFinishedHookDone": 1440115142, "ImageSize": 1750000, "JobUniverse": 5, "EncryptExecuteDirectory": false, "MATCH_GLIDEIN_ToRetire": 1440616411, "CRAB_JobType": "analysis", "EnvDelim": ";", "ExitBySignal": false, "LastMatchTime": 1440081782, "OrigMaxHosts": 1, "MATCH_EXP_JOB_GLIDEIN_CMSSite": "T2_US_Nebraska", "MATCH_EXP_Used_Gatekeeper": "red.unl.edu red.unl.edu:9619", "MATCH_EXP_JOB_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_long", "TerminationPending": true, "CRAB_UserHN": "mrodozov", "BlockReads": 0, "DAGManJobId": 1183604, "MATCH_GLIDEIN_SEs": "srm.unl.edu", "MemoryUsage": "( ( ResidentSetSize + 1023 ) / 1024 )", "PeriodicRemoveReason": "ifThenElse(MemoryUsage > RequestMemory,\"Removed due to memory use\",ifThenElse(MaxWallTimeMins * 60 < time() - EnteredCurrentStatus,\"Removed due to wall clock limit\",ifThenElse(DiskUsage > 100000000,\"Removed due to disk usage\",ifThenElse(time() > x509UserProxyExpiration,\"Removed job
  due to proxy expiration\",\"Removed due to job being held\"))))", "JobNotification": 0, "AcctGroup": "analysis", "LocalUserCpu": 0.0d, "NumJobMatches": 1, "MATCH_GLIDEIN_Entry_Name": "CMS_T2_US_Nebraska_Red_long", "ExecutableSize_RAW": 6, "CRAB_SaveLogsFlag": 1, "UserLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/job_log", "use_x509userproxy": true, "DAGManNodesMask": "0,1,2,4,5,7,9,10,11,12,13,16,17,24,27", "MATCH_EXP_JOB_GLIDEIN_ToRetire": "1440616411", "LastJobLeaseRenewal": 1440115142, "AcctGroupUser": "uscms3850", "MATCH_EXP_JOB_GLIDEIN_Factory": "OSGGOC", "JOB_GLIDEIN_Job_Max_Time": "$$(GLIDEIN_Job_Max_Time:Unknown)", "TransferOutput": "jobReport.json.4", "x509UserProxyEmail": "mircho.nikolaev.rodozov@cern.ch", "CRAB_localOutputFiles": "results.root=results_4.root", "CondorPlatform": "$CondorPlatform: X86_64-RedHat_6.6 $", "MATCH_GLIDEIN_Factory": "OSGGOC", "accounting_group": "analysis", "DAGNodeName": "Job4", "PeriodicRelease": "( HoldReasonCode == 28 
 ) || ( HoldReasonCode == 30 ) || ( HoldReasonCode == 13 ) || ( HoldReasonCode == 6 )", "CRAB_Publish": 1, "MATCH_EXP_JOB_GLIDEIN_ClusterId": "2561111", "MATCH_GLIDEIN_MaxMemMBs": 2500, "JOB_GLIDEIN_SiteWMS_Slot": "$$(GLIDEIN_SiteWMS_Slot:Unknown)", "JOB_GLIDEIN_Entry_Name": "$$(GLIDEIN_Entry_Name:Unknown)", "TransferInput": "CMSRunAnalysis.sh,cmscp.py,CMSRunAnalysis.tar.gz,sandbox.tar.gz,run_and_lumis.tar.gz", "CRAB_NumAutomJobRetries": 2, "CommittedSuspensionTime": 0, "CRAB_DataBlock": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM#242b435c-fc56-11e4-bda5-001e67abef8c", "MATCH_EXP_JOB_GLIDEIN_Name": "gfactory_instance", "StreamErr": false, "CRAB_ReqName": "150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "CMS_ALLOW_OVERFLOW": "True", "RecentBlockReadKbytes": 0, "WantRemoteSyscalls": false, "MATCH_GLIDEIN_Job_Max_Time": 34800, "NumShadowStarts": 1, "MachineAttrCpus0": 1, "DESIRED_OpSyses": "LINUX", "DAGMan
 NodesLog": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/RunJobs.dag.nodes.log", "Owner": "uscms3850", "PreJobPrio1": 0, "DiskUsage": 3750000, "CRAB_ASOURL": "https://cmsweb.cern.ch/couchdb", "JobLeaseDuration": 1200, "x509UserProxyFirstFQAN": "/cms/Role=NULL/Capability=NULL", "Env": "CRAB_TASKMANAGER_TARBALL=local;SCRAM_ARCH=slc6_amd64_gcc491;CRAB_RUNTIME_TARBALL=local", "MaxWallTimeMins": 1400, "CRAB_SiteBlacklist": "{  }", "JOB_GLIDEIN_MaxMemMBs": "$$(GLIDEIN_MaxMemMBs:Unknown)", "RecentStatsLifetimeStarter": 1200, "CRAB_EDMOutputFiles": "{  }", "MATCH_GLIDEIN_CMSSite": "T2_US_Nebraska", "Iwd": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0", "JOB_GLIDEIN_SEs": "$$(GLIDEIN_SEs:Unknown)", "StreamOut": false, "JobAdInformationAttrs": "MATCH_EXP_JOBGLIDEIN_CMSSite, JOBGLIDEIN_CMSSite, RemoteSysCpu, RemoteUserCpu", "StatsLifetimeStarter": 33352, "SubmitEventNotes": "DAG Node: Job4", "CRAB_UserDN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/
 CN=692532/CN=Mircho Nikolaev Rodozov", "CRAB_TFileOutputFiles": "{ \"results.root\" }", "LeaveJobInQueue": false, "AutoClusterId": 16278, "StartdPrincipal": "execute-side@matchsession/129.93.182.12", "JOB_Gatekeeper": "ifthenelse(substr(Used_Gatekeeper,0,1) =!= \"$\",Used_Gatekeeper,ifthenelse(MATCH_GLIDEIN_Gatekeeper =!= undefined,MATCH_GLIDEIN_Gatekeeper,\"Unknown\"))", "MATCH_Memory": 2500, "MATCH_GLIDEIN_SiteWMS_Slot": "slot1_6@red-d8n12.unl.edu", "CRAB_StageoutPolicy": "local,remote", "OnExitRemove": true, "MaxWallTimeMins_RAW": 1400, "JobCurrentStartDate": 1440081782, "CRAB_ASOTimeout": 86400, "ExitStatus": 0, "JOB_GLIDECLIENT_Name": "$$(GLIDECLIENT_Name:Unknown)", "CRAB_Destination": "srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/log/cmsRun_4.log.tar.gz, srm://srm-eoscms.cern.ch:8443/srm/v2/server?SFN=/eos/cms/
 store/group/phys_b2g/BprimeKit_ntuple_747_1_MC/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/150814_111316/0000/results_4.root", "CRAB_DBSURL": "https://cmsweb.cern.ch/dbs/prod/global/DBSReader", "ResidentSetSize": 1250000, "x509UserProxyExpiration": 1440171330, "x509userproxy": "/data/condor_local/spool/3604/0/cluster1183604.proc0.subproc0/3adf46df379a2324bc159ae74f147ae01ca238c9", "JOB_GLIDEIN_SiteWMS_JobId": "$$(GLIDEIN_SiteWMS_JobId:Unknown)", "In": "/dev/null", "MATCH_GLIDEIN_SiteWMS": "HTCondor", "NumRestarts": 0, "DiskUsage_RAW": 3661158, "JOB_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_GLIDEIN_SiteWMS_JobId": "5092137.0", "ResidentSetSize_RAW": 1148372, "x509userproxysubject": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov", "JOBGLIDEIN_CMSSite": "$$([ifThenElse(GLIDEIN_CMSSite is undefined, \"Unknown\", GLIDEIN_CMSSite)])", "CRAB_ISB": "https://cmsweb.cern.ch/crabcache", "Cmd": "/data/con
 dor_local/spool/3604/0/cluster1183604.proc0.subproc0/gWMS-CMSRunAnalysis.sh", "RequestDisk_RAW": 1, "CRAB_AdditionalOutputFiles": "{  }", "ShouldTransferFiles": "YES", "TargetType": "Machine", "MATCH_EXP_JOB_GLIDEIN_Job_Max_Time": "34800", "CRAB_UserRole": "undefined", "BlockWrites": 0, "NiceUser": false, "Out": "job_out.4", "ImageSize_RAW": 1727056, "JOB_CMSSite": "$$(GLIDEIN_CMSSite:Unknown)", "x509UserProxyVOName": "cms", "DESIRED_Overflow_Region": "regexps(\"T[12]_US_\",DESIRED_Sites,\"US\")", "CRAB_JobArch": "slc6_amd64_gcc491", "CumulativeSuspensionTime": 0, "JOB_GLIDEIN_Site": "$$(GLIDEIN_Site:Unknown)", "MATCH_EXP_JOB_GLIDEIN_Memory": "2500", "TransferIn": false, "MATCH_EXP_JOB_GLIDEIN_ProcId": "8", "CRAB_BlacklistT1": 0, "Err": "job_err.4", "RecentBlockWrites": 0, "RequestMemory_RAW": 2000, "ProcId": 0, "RecentBlockReads": 0, "TransferInputSizeMB": 42, "SpooledOutputFiles": "jobReport.json.4", "MATCH_GLIDEIN_Site": "Nebraska", "BlockWriteKbytes": 0, "SpoolOnEvict": false, "
 WhenToTransferOutput": "ON_EXIT_OR_EVICT", "MATCH_GLIDECLIENT_Name": "CMSG-v1_0.overflow", "JobCurrentStartExecutingDate": 1440081789, "MATCH_GLIDEIN_ProcId": 8, "DESIRED_OpSysMajorVers": "6", "OnExitHold": false, "RequestMemory": 2000, "NumJobStarts": 1, "JOB_GLIDEIN_ToRetire": "$$(GLIDEIN_ToRetire:Unknown)", "JOB_GLIDEIN_Schedd": "$$(GLIDEIN_Schedd:Unknown)", "TotalSuspensions": 0, "ExitCode": 0, "CRAB_PublishName": "crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8-025cf8039fdddfc0e0037d5a7ca660ac", "CRAB_UserWebDir": "http://submit-5.t2.ucsd.edu/CSstoragePath/54/uscms3850/150814_111316:mrodozov_crab_QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8", "BlockReadKbytes": 0, "AccountingGroup": "analysis.mrodozov", "MATCH_EXP_JOB_GLIDEIN_ToDie": "1440651211", "CRAB_PublishDBSURL": "https://cmsweb.cern.ch/dbs/prod/phys03/DBSWriter", "MATCH_EXP_JOB_GLIDEIN_Site": "Nebraska", "LastJobStatus": 2, "BufferBlockSize": 32768, "CommittedTime": 33360, "CRAB_Retry": 3, "LastSuspensionTime": 0, "MATCH_E
 XP_JOB_GLIDEIN_Max_Walltime": "603000", "TaskType": "Job", "CumulativeSlotTime": 33360.0d, "job_ad_information_attrs": "MATCH_GLIDEIN_Gatekeeper", "MachineAttrSlotWeight0": 1, "NumSystemHolds": 0, "CRAB_RetryOnASOFailures": 1, "Used_Gatekeeper": "$$(GLIDEIN_Gatekeeper:Unknown)", "BytesRecvd": 4.4879356E7d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Slot": "slot1_6@red-d8n12.unl.edu", "JobRunCount": 1, "LastRemoteHost": "glidein_1936_57194584@red-d8n12.unl.edu", "JobPrio": 10, "CRAB_TransferOutputs": 1, "LocalSysCpu": 0.0d, "MATCH_EXP_JOB_GLIDEIN_SiteWMS_JobId": "5092137.0", "RemoteSysCpu": 616.0d, "ExecutableSize": 7, "PeriodicHold": false, "CRAB_InputData": "/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/RunIISpring15DR74-Asympt25ns_MCRUN2_74_V9-v1/MINIAODSIM", "WantRemoteIO": true, "CRAB_JobSW": "CMSSW_7_4_7_patch2", "CRAB_SiteWhitelist": "{  }", "JOB_GLIDEIN_Max_Walltime": "$$(GLIDEIN_Max_Walltime:Unknown)", "JOB_GLIDEIN_ProcId": "$$(GLIDEIN_ProcId:Unknown)", "CompletionDate": 1440115142, "CRAB
 _RestHost": "cmsweb.cern.ch", "MATCH_EXP_JOB_GLIDEIN_SiteWMS_Queue": "red.unl.edu", "CRAB_oneEventMode": 0, "Requirements": "( ( ( target.IS_GLIDEIN =!= true ) || ( target.GLIDEIN_CMSSite =!= undefined ) ) && ( GLIDEIN_REQUIRED_OS =?= \"rhel6\" || OpSysMajorVer =?= 6 ) ) && ( ( Memory >= 1 ) && ( Disk >= 1 ) ) && ( TARGET.Arch == \"X86_64\" ) && ( TARGET.OpSys == \"LINUX\" ) && ( TARGET.Disk >= RequestDisk ) && ( TARGET.Memory >= RequestMemory ) && ( TARGET.HasFileTransfer )", "x509UserProxyFQAN": "/DC=ch/DC=cern/OU=Organic Units/OU=Users/CN=mrodozov/CN=692532/CN=Mircho Nikolaev Rodozov,/cms/Role=NULL/Capability=NULL", "ClusterId": 1235992, "CRAB_PublishGroupName": 0, "BufferSize": 524288, "JOB_GLIDEIN_ClusterId": "$$(GLIDEIN_ClusterId:Unknown)", "CommittedSlotTime": 33360.0d, "DESIRED_Archs": "X86_64", "MATCH_GLIDEIN_ToDie": 1440651211, "LastPublicClaimId": "<129.93.182.12:42491>#1440048812#7#...", "CurrentHosts": 0, "QDate": 1440081300, "Arguments": "-a sandbox.tar.gz --sourceURL=
 https://cmsweb.cern.ch/crabcache --jobNumber=4 --cmsswVersion=CMSSW_7_4_7_patch2 --scramArch=slc6_amd64_gcc491 --inputFile=[\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9A89CA60-69FC-E411-9661-0025905C42B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/34F8B66A-D4FB-E411-8F89-842B2B29273C.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/7CE6B848-F5FB-E411-A605-0025905A60A8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/9E842AA8-54FC-E411-8BC7-000F53273500.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/C44AD465-D4FB-E411-8704-002590200A40.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUE
 TP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/90B6CB1B-07FD-E411-BD52-001E67397CBA.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/183FB65F-69FC-E411-A5A8-0025904B7C26.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/50000/5A0A9A0E-EDFB-E411-B95F-00266CF330B8.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/3E3768F1-61FC-E411-B163-002618943956.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/08DB9DDE-F4FB-E411-9BC9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/CE293F9B-54FC-E411-83E8-AC853D9DACD3.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_p
 ythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/A4479F5F-69FC-E411-B0B5-0025904C6378.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/0419455F-D4FB-E411-AEFA-00261894394A.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/E6BD6C76-54FC-E411-A1F2-AC853D9DACD7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/AC15F863-F5FB-E411-8F07-002590DB9286.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/70000/CC9B7EE2-F4FB-E411-BCD9-52540001DACD.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/5844575F-D4FB-E411-81F5-003048FFD732.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAO
 DSIM/Asympt25ns_MCRUN2_74_V9-v1/60000/6EC5205E-D4FB-E411-9885-001E67396BB7.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/B63200E8-69FC-E411-B949-0025904C51FC.root\",' '\"/store/mc/RunIISpring15DR74/QCD_Pt_300to470_TuneCUETP8M1_13TeV_pythia8/MINIAODSIM/Asympt25ns_MCRUN2_74_V9-v1/80000/14554A42-54FC-E411-86D2-0025905A605E.root\"] --runAndLumis=job_lumis_4.json --lheInputFiles=False --firstEvent=None --firstLumi=None --lastEvent=None --firstRun=None --seeding=AutomaticSeeding --scriptExe=None --eventsPerLumi=None --scriptArgs=[] -o {}", "MATCH_EXP_JOB_GLIDEIN_SEs": "srm.unl.edu", "CRAB_Id": 4, "User": "uscms3850@cms", "JOB_GLIDEIN_Factory": "$$(GLIDEIN_Factory:Unknown)" }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/results/external-library/getCapital/getCapital.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/external-library/getCapital/getCapital.1.adm b/asterix-app/src/test/resources/runtimets/results/external-library/getCapital/getCapital.1.adm
new file mode 100644
index 0000000..16e9591
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/external-library/getCapital/getCapital.1.adm
@@ -0,0 +1,6 @@
+{ "country": "England", "capital": "London" }
+{ "country": "Italy", "capital": "Rome" }
+{ "country": "China", "capital": "Beijing" }
+{ "country": "United States", "capital": "Washington D.C." }
+{ "country": "India", "capital": "New Delhi" }
+{ "country": "Jupiter", "capital": "NOT_FOUND" }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/test/resources/runtimets/results/external-library/typed_adapter/typed_adapter.1.adm
----------------------------------------------------------------------
diff --git a/asterix-app/src/test/resources/runtimets/results/external-library/typed_adapter/typed_adapter.1.adm b/asterix-app/src/test/resources/runtimets/results/external-library/typed_adapter/typed_adapter.1.adm
new file mode 100644
index 0000000..6a3fbcd
--- /dev/null
+++ b/asterix-app/src/test/resources/runtimets/results/external-library/typed_adapter/typed_adapter.1.adm
@@ -0,0 +1,5 @@
+{ "tweetid": 1, "message-text": "1" }
+{ "tweetid": 2, "message-text": "2" }
+{ "tweetid": 3, "message-text": "3" }
+{ "tweetid": 4, "message-text": "4" }
+{ "tweetid": 5, "message-text": "5" }


[27/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/pom.xml
----------------------------------------------------------------------
diff --git a/asterix-app/pom.xml b/asterix-app/pom.xml
index 9b87d9f..09a4c4c 100644
--- a/asterix-app/pom.xml
+++ b/asterix-app/pom.xml
@@ -24,15 +24,14 @@
         <version>0.8.8-SNAPSHOT</version>
     </parent>
     <artifactId>asterix-app</artifactId>
-
-	<licenses>
-		<license>
-			<name>Apache License, Version 2.0</name>
-			<url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
-			<distribution>repo</distribution>
-			<comments>A business-friendly OSS license</comments>
-		</license>
-	</licenses>
+    <licenses>
+        <license>
+            <name>Apache License, Version 2.0</name>
+            <url>http://www.apache.org/licenses/LICENSE-2.0.txt</url>
+            <distribution>repo</distribution>
+            <comments>A business-friendly OSS license</comments>
+        </license>
+    </licenses>
     <build>
         <plugins>
             <plugin>
@@ -95,9 +94,32 @@
                     </execution>
                 </executions>
             </plugin>
+            <plugin>
+                <artifactId>maven-resources-plugin</artifactId>
+                <executions>
+                    <execution>
+                        <id>copy-external-library</id>
+                        <phase>generate-resources</phase>
+                        <goals>
+                            <goal>copy-resources</goal>
+                        </goals>
+                        <configuration>
+                            <outputDirectory>src/test/resources/externallib</outputDirectory>
+                            <overwrite>true</overwrite>
+                            <resources>
+                                <resource>
+                                    <directory>../asterix-external-data/target</directory>
+                                    <includes>
+                                        <include>testlib-zip-binary-assembly.zip</include>
+                                    </includes>
+                                </resource>
+                            </resources>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
         </plugins>
     </build>
-
     <dependencies>
         <dependency>
             <groupId>javax.servlet</groupId>
@@ -137,41 +159,41 @@
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-algebra</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-om</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <type>jar</type>
             <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-metadata</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <type>jar</type>
             <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-tools</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <type>jar</type>
             <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-common</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <type>jar</type>
             <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-common</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <type>test-jar</type>
             <scope>test</scope>
         </dependency>
@@ -181,7 +203,7 @@
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-transactions</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <scope>compile</scope>
         </dependency>
         <dependency>
@@ -217,7 +239,7 @@
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-test-framework</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <scope>test</scope>
         </dependency>
         <dependency>
@@ -235,13 +257,13 @@
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-replication</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
             <scope>compile</scope>
         </dependency>
         <dependency>
             <groupId>org.apache.asterix</groupId>
             <artifactId>asterix-external-data</artifactId>
-            <version>0.8.8-SNAPSHOT</version>
+            <version>${project.version}</version>
         </dependency>
     </dependencies>
-</project>
+</project>
\ No newline at end of file
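
Note: the copy-resources execution added above stages the packaged external library zip where the runtime tests expect to find it. The following stand-alone Java sketch mirrors that step outside of Maven; the paths are illustrative only and assume the asterix-external-data module has already been built.

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;

public class CopyExternalLibrary {
    public static void main(String[] args) throws IOException {
        // Source: the zip produced by the asterix-external-data build (assumed location).
        Path source = Paths.get("../asterix-external-data/target/testlib-zip-binary-assembly.zip");
        // Destination: the directory the runtime tests read external libraries from.
        Path targetDir = Paths.get("src/test/resources/externallib");
        Files.createDirectories(targetDir);
        // Overwrite any stale copy, matching <overwrite>true</overwrite> in the plugin configuration.
        Files.copy(source, targetDir.resolve(source.getFileName()), StandardCopyOption.REPLACE_EXISTING);
    }
}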

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/assembly/binary-assembly.xml
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/assembly/binary-assembly.xml b/asterix-app/src/main/assembly/binary-assembly.xml
index 013769f..91e2549 100644
--- a/asterix-app/src/main/assembly/binary-assembly.xml
+++ b/asterix-app/src/main/assembly/binary-assembly.xml
@@ -34,4 +34,14 @@
 			<outputDirectory>lib</outputDirectory>
 		</fileSet>
 	</fileSets>
+    <dependencySets>
+        <dependencySet>
+            <outputDirectory>externallib</outputDirectory>
+            <includes>
+                <include>asterix-external-data:*:zip</include>
+            </includes>
+            <unpack>false</unpack>
+            <useTransitiveDependencies>false</useTransitiveDependencies>
+        </dependencySet>
+    </dependencySets>
 </assembly>
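
Note: the dependencySet above bundles the asterix-external-data zip artifact into an externallib directory of the binary assembly without unpacking it. A quick way to sanity-check an assembled distribution is to list that directory; the install path used below is hypothetical.

import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class ListExternalLib {
    public static void main(String[] args) throws IOException {
        // Hypothetical location of an unpacked binary assembly.
        Path externallib = Paths.get("target/asterix-app-binary-assembly/externallib");
        // Print every bundled artifact; the external library zip should appear here, still zipped.
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(externallib, "*.zip")) {
            for (Path entry : stream) {
                System.out.println(entry.getFileName());
            }
        }
    }
}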

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
index 4df461b..79ce721 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/ConnectorAPIServlet.java
@@ -29,7 +29,7 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
-import org.apache.asterix.feed.CentralFeedManager;
+import org.apache.asterix.app.external.CentralFeedManager;
 import org.apache.asterix.metadata.MetadataManager;
 import org.apache.asterix.metadata.MetadataTransactionContext;
 import org.apache.asterix.metadata.declared.AqlMetadataProvider;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
index 6957926..eacee6d 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServlet.java
@@ -32,13 +32,13 @@ import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 
+import org.apache.asterix.app.external.CentralFeedManager;
 import org.apache.asterix.external.feed.api.IFeedLoadManager;
 import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
 import org.apache.asterix.external.feed.management.FeedConnectionId;
 import org.apache.asterix.external.feed.management.FeedId;
 import org.apache.asterix.external.feed.watch.FeedActivity;
 import org.apache.asterix.external.feed.watch.FeedActivity.FeedActivityDetails;
-import org.apache.asterix.feed.CentralFeedManager;
 
 public class FeedServlet extends HttpServlet {
     private static final long serialVersionUID = 1L;

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
index d459775..52a140d 100644
--- a/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
+++ b/asterix-app/src/main/java/org/apache/asterix/api/http/servlet/FeedServletUtil.java
@@ -26,9 +26,9 @@ import java.util.concurrent.LinkedBlockingQueue;
 import java.util.logging.Level;
 import java.util.logging.Logger;
 
+import org.apache.asterix.app.external.FeedLifecycleListener;
 import org.apache.asterix.external.feed.management.FeedConnectionId;
 import org.apache.asterix.external.feed.message.RemoteSocketMessageListener;
-import org.apache.asterix.feed.FeedLifecycleListener;
 
 public class FeedServletUtil {
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/CentralFeedManager.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/CentralFeedManager.java b/asterix-app/src/main/java/org/apache/asterix/app/external/CentralFeedManager.java
new file mode 100644
index 0000000..cab5e64
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/CentralFeedManager.java
@@ -0,0 +1,110 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.io.IOException;
+import java.io.PrintWriter;
+import java.io.StringReader;
+import java.util.List;
+
+import org.apache.asterix.api.common.SessionConfig;
+import org.apache.asterix.api.common.SessionConfig.OutputFormat;
+import org.apache.asterix.aql.translator.QueryTranslator;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.compiler.provider.AqlCompilationProvider;
+import org.apache.asterix.compiler.provider.ILangCompilationProvider;
+import org.apache.asterix.external.feed.api.ICentralFeedManager;
+import org.apache.asterix.external.feed.api.IFeedLoadManager;
+import org.apache.asterix.external.feed.api.IFeedTrackingManager;
+import org.apache.asterix.external.feed.message.SocketMessageListener;
+import org.apache.asterix.lang.aql.parser.AQLParserFactory;
+import org.apache.asterix.lang.common.base.IParser;
+import org.apache.asterix.lang.common.base.IParserFactory;
+import org.apache.asterix.lang.common.base.Statement;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.hyracks.api.client.IHyracksClientConnection;
+import org.apache.hyracks.api.job.JobId;
+import org.apache.hyracks.api.job.JobSpecification;
+
+public class CentralFeedManager implements ICentralFeedManager {
+
+    private static final ICentralFeedManager centralFeedManager = new CentralFeedManager();
+    private static final ILangCompilationProvider compilationProvider = new AqlCompilationProvider();
+
+    public static ICentralFeedManager getInstance() {
+        return centralFeedManager;
+    }
+
+    private final int port;
+    private final IFeedLoadManager feedLoadManager;
+    private final IFeedTrackingManager feedTrackingManager;
+    private final SocketMessageListener messageListener;
+
+    private CentralFeedManager() {
+        this.port = AsterixAppContextInfo.getInstance().getFeedProperties().getFeedCentralManagerPort();
+        this.feedLoadManager = new FeedLoadManager();
+        this.feedTrackingManager = new FeedTrackingManager();
+        this.messageListener = new SocketMessageListener(port, new FeedMessageReceiver(this));
+    }
+
+    @Override
+    public void start() throws AsterixException {
+        messageListener.start();
+    }
+
+    @Override
+    public void stop() throws AsterixException, IOException {
+        messageListener.stop();
+    }
+
+    public static JobId runJob(JobSpecification spec, boolean waitForCompletion) throws Exception {
+        IHyracksClientConnection hcc = AsterixAppContextInfo.getInstance().getHcc();
+        JobId jobId = hcc.startJob(spec);
+        if (waitForCompletion) {
+            hcc.waitForCompletion(jobId);
+        }
+        return jobId;
+    }
+
+    @Override
+    public IFeedLoadManager getFeedLoadManager() {
+        return feedLoadManager;
+    }
+
+    @Override
+    public IFeedTrackingManager getFeedTrackingManager() {
+        return feedTrackingManager;
+    }
+
+    public static class AQLExecutor {
+
+        private static final PrintWriter out = new PrintWriter(System.out, true);
+        private static final IParserFactory parserFactory = new AQLParserFactory();
+
+        public static void executeAQL(String aql) throws Exception {
+            IParser parser = parserFactory.createParser(new StringReader(aql));
+            List<Statement> statements = parser.parse();
+            SessionConfig pc = new SessionConfig(out, OutputFormat.ADM);
+            QueryTranslator translator = new QueryTranslator(statements, pc, compilationProvider);
+            translator.compileAndExecute(AsterixAppContextInfo.getInstance().getHcc(), null,
+                    QueryTranslator.ResultDelivery.SYNC);
+        }
+    }
+
+}
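
Note: the AQLExecutor helper above parses and executes AQL statements against the running instance, and runJob submits a pre-built job specification to Hyracks. A minimal usage sketch, assuming a started AsterixDB instance; the job-spec helper below is a hypothetical placeholder, not part of this patch.

import org.apache.asterix.app.external.CentralFeedManager;
import org.apache.hyracks.api.job.JobId;
import org.apache.hyracks.api.job.JobSpecification;

public class FeedAdminExample {
    public static void main(String[] args) throws Exception {
        // Run an AQL statement through the embedded QueryTranslator (output is written to System.out in ADM format).
        CentralFeedManager.AQLExecutor.executeAQL("drop dataverse feeds if exists;");

        // Submit a job specification and block until it finishes.
        JobSpecification spec = buildSomeJobSpec(); // hypothetical helper, not part of the patch
        JobId jobId = CentralFeedManager.runJob(spec, true);
        System.out.println("Completed job: " + jobId);
    }

    private static JobSpecification buildSomeJobSpec() {
        // Placeholder: in practice the spec comes from the compiler or one of the build*Op methods.
        return new JobSpecification();
    }
}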

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java b/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java
new file mode 100644
index 0000000..2f497f9
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalIndexingOperations.java
@@ -0,0 +1,762 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Date;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+import org.apache.asterix.common.api.ILocalResourceMetadata;
+import org.apache.asterix.common.config.AsterixStorageProperties;
+import org.apache.asterix.common.config.DatasetConfig.DatasetType;
+import org.apache.asterix.common.config.DatasetConfig.ExternalDatasetTransactionState;
+import org.apache.asterix.common.config.DatasetConfig.ExternalFilePendingOp;
+import org.apache.asterix.common.config.DatasetConfig.IndexType;
+import org.apache.asterix.common.config.IAsterixPropertiesProvider;
+import org.apache.asterix.common.context.AsterixVirtualBufferCacheProvider;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.ioopcallbacks.LSMBTreeIOOperationCallbackFactory;
+import org.apache.asterix.common.ioopcallbacks.LSMBTreeWithBuddyIOOperationCallbackFactory;
+import org.apache.asterix.common.ioopcallbacks.LSMRTreeIOOperationCallbackFactory;
+import org.apache.asterix.dataflow.data.nontagged.valueproviders.AqlPrimitiveValueProviderFactory;
+import org.apache.asterix.external.api.IAdapterFactory;
+import org.apache.asterix.external.indexing.ExternalFile;
+import org.apache.asterix.external.indexing.FilesIndexDescription;
+import org.apache.asterix.external.indexing.IndexingConstants;
+import org.apache.asterix.external.operators.ExternalDataScanOperatorDescriptor;
+import org.apache.asterix.external.operators.ExternalDatasetIndexesAbortOperatorDescriptor;
+import org.apache.asterix.external.operators.ExternalDatasetIndexesCommitOperatorDescriptor;
+import org.apache.asterix.external.operators.ExternalDatasetIndexesRecoverOperatorDescriptor;
+import org.apache.asterix.external.operators.ExternalFilesIndexOperatorDescriptor;
+import org.apache.asterix.external.operators.IndexInfoOperatorDescriptor;
+import org.apache.asterix.external.provider.AdapterFactoryProvider;
+import org.apache.asterix.external.util.ExternalDataConstants;
+import org.apache.asterix.file.IndexOperations;
+import org.apache.asterix.file.JobSpecificationUtils;
+import org.apache.asterix.formats.nontagged.AqlBinaryComparatorFactoryProvider;
+import org.apache.asterix.formats.nontagged.AqlSerializerDeserializerProvider;
+import org.apache.asterix.formats.nontagged.AqlTypeTraitProvider;
+import org.apache.asterix.metadata.MetadataException;
+import org.apache.asterix.metadata.MetadataManager;
+import org.apache.asterix.metadata.declared.AqlMetadataProvider;
+import org.apache.asterix.metadata.entities.Dataset;
+import org.apache.asterix.metadata.entities.ExternalDatasetDetails;
+import org.apache.asterix.metadata.entities.Index;
+import org.apache.asterix.metadata.utils.DatasetUtils;
+import org.apache.asterix.metadata.utils.ExternalDatasetsRegistry;
+import org.apache.asterix.om.types.ARecordType;
+import org.apache.asterix.om.types.ATypeTag;
+import org.apache.asterix.om.types.BuiltinType;
+import org.apache.asterix.om.types.IAType;
+import org.apache.asterix.om.util.AsterixAppContextInfo;
+import org.apache.asterix.om.util.NonTaggedFormatUtil;
+import org.apache.asterix.transaction.management.opcallbacks.SecondaryIndexOperationTrackerProvider;
+import org.apache.asterix.transaction.management.resource.ExternalBTreeLocalResourceMetadata;
+import org.apache.asterix.transaction.management.resource.PersistentLocalResourceFactoryProvider;
+import org.apache.asterix.transaction.management.service.transaction.AsterixRuntimeComponentsProvider;
+import org.apache.asterix.translator.CompiledStatements.CompiledCreateIndexStatement;
+import org.apache.asterix.translator.CompiledStatements.CompiledIndexDropStatement;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
+import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraintHelper;
+import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.algebricks.common.utils.Pair;
+import org.apache.hyracks.algebricks.core.jobgen.impl.ConnectorPolicyAssignmentPolicy;
+import org.apache.hyracks.api.dataflow.value.IBinaryComparatorFactory;
+import org.apache.hyracks.api.dataflow.value.ISerializerDeserializer;
+import org.apache.hyracks.api.dataflow.value.ITypeTraits;
+import org.apache.hyracks.api.dataflow.value.RecordDescriptor;
+import org.apache.hyracks.api.job.JobSpecification;
+import org.apache.hyracks.dataflow.std.file.IFileSplitProvider;
+import org.apache.hyracks.storage.am.common.api.IPrimitiveValueProviderFactory;
+import org.apache.hyracks.storage.am.common.dataflow.IndexDropOperatorDescriptor;
+import org.apache.hyracks.storage.am.common.impls.NoOpOperationCallbackFactory;
+import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeDataflowHelperFactory;
+import org.apache.hyracks.storage.am.lsm.btree.dataflow.ExternalBTreeWithBuddyDataflowHelperFactory;
+import org.apache.hyracks.storage.am.lsm.btree.dataflow.LSMBTreeDataflowHelperFactory;
+import org.apache.hyracks.storage.am.lsm.common.api.ILSMMergePolicyFactory;
+import org.apache.hyracks.storage.am.lsm.common.dataflow.LSMTreeIndexCompactOperatorDescriptor;
+import org.apache.hyracks.storage.am.lsm.rtree.dataflow.ExternalRTreeDataflowHelperFactory;
+import org.apache.hyracks.storage.am.rtree.frames.RTreePolicyType;
+import org.apache.hyracks.storage.common.file.LocalResource;
+
+public class ExternalIndexingOperations {
+
+    public static final List<List<String>> FILE_INDEX_FIELD_NAMES = new ArrayList<List<String>>();
+    public static final ArrayList<IAType> FILE_INDEX_FIELD_TYPES = new ArrayList<IAType>();
+
+    static {
+        FILE_INDEX_FIELD_NAMES.add(new ArrayList<String>(Arrays.asList("")));
+        FILE_INDEX_FIELD_TYPES.add(BuiltinType.ASTRING);
+    }
+
+    public static boolean isIndexible(ExternalDatasetDetails ds) {
+        String adapter = ds.getAdapter();
+        if (adapter.equalsIgnoreCase(ExternalDataConstants.ALIAS_HDFS_ADAPTER)) {
+            return true;
+        }
+        return false;
+    }
+
+    public static boolean isRefereshActive(ExternalDatasetDetails ds) {
+        return ds.getState() != ExternalDatasetTransactionState.COMMIT;
+    }
+
+    public static boolean isValidIndexName(String datasetName, String indexName) {
+        return (!datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX).equals(indexName));
+    }
+
+    public static String getFilesIndexName(String datasetName) {
+        return datasetName.concat(IndexingConstants.EXTERNAL_FILE_INDEX_NAME_SUFFIX);
+    }
+
+    public static int getRIDSize(Dataset dataset) {
+        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
+        return IndexingConstants.getRIDSize(dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT));
+    }
+
+    public static IBinaryComparatorFactory[] getComparatorFactories(Dataset dataset) {
+        ExternalDatasetDetails dsd = ((ExternalDatasetDetails) dataset.getDatasetDetails());
+        return IndexingConstants.getComparatorFactories((dsd.getProperties().get(IndexingConstants.KEY_INPUT_FORMAT)));
+    }
+
+    public static IBinaryComparatorFactory[] getBuddyBtreeComparatorFactories() {
+        return IndexingConstants.getBuddyBtreeComparatorFactories();
+    }
+
+    public static ArrayList<ExternalFile> getSnapshotFromExternalFileSystem(Dataset dataset)
+            throws AlgebricksException {
+        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
+        ExternalDatasetDetails datasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
+        try {
+            // Create the file system object
+            FileSystem fs = getFileSystemObject(datasetDetails.getProperties());
+            // Get paths of dataset
+            String path = datasetDetails.getProperties().get(ExternalDataConstants.KEY_PATH);
+            String[] paths = path.split(",");
+
+            // Add fileStatuses to files
+            for (String aPath : paths) {
+                FileStatus[] fileStatuses = fs.listStatus(new Path(aPath));
+                for (int i = 0; i < fileStatuses.length; i++) {
+                    int nextFileNumber = files.size();
+                    if (fileStatuses[i].isDirectory()) {
+                        listSubFiles(dataset, fs, fileStatuses[i], files);
+                    } else {
+                        files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
+                                fileStatuses[i].getPath().toUri().getPath(),
+                                new Date(fileStatuses[i].getModificationTime()), fileStatuses[i].getLen(),
+                                ExternalFilePendingOp.PENDING_NO_OP));
+                    }
+                }
+            }
+            // Close file system
+            fs.close();
+            if (files.size() == 0) {
+                throw new AlgebricksException("File Snapshot retrieved from external file system is empty");
+            }
+            return files;
+        } catch (Exception e) {
+            e.printStackTrace();
+            throw new AlgebricksException("Unable to get list of HDFS files " + e);
+        }
+    }
+
+    /* list all files under the directory
+     * src is expected to be a folder
+     */
+    private static void listSubFiles(Dataset dataset, FileSystem srcFs, FileStatus src, ArrayList<ExternalFile> files)
+            throws IOException {
+        Path path = src.getPath();
+        FileStatus[] fileStatuses = srcFs.listStatus(path);
+        for (int i = 0; i < fileStatuses.length; i++) {
+            int nextFileNumber = files.size();
+            if (fileStatuses[i].isDirectory()) {
+                listSubFiles(dataset, srcFs, fileStatuses[i], files);
+            } else {
+                files.add(new ExternalFile(dataset.getDataverseName(), dataset.getDatasetName(), nextFileNumber,
+                        fileStatuses[i].getPath().toUri().getPath(), new Date(fileStatuses[i].getModificationTime()),
+                        fileStatuses[i].getLen(), ExternalFilePendingOp.PENDING_NO_OP));
+            }
+        }
+    }
+
+    public static FileSystem getFileSystemObject(Map<String, String> map) throws IOException {
+        Configuration conf = new Configuration();
+        conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_URI, map.get(ExternalDataConstants.KEY_HDFS_URL).trim());
+        conf.set(ExternalDataConstants.KEY_HADOOP_FILESYSTEM_CLASS, DistributedFileSystem.class.getName());
+        return FileSystem.get(conf);
+    }
+
+    public static JobSpecification buildFilesIndexReplicationJobSpec(Dataset dataset,
+            ArrayList<ExternalFile> externalFilesSnapshot, AqlMetadataProvider metadataProvider, boolean createIndex)
+                    throws MetadataException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(), dataset.getDatasetName(),
+                        getFilesIndexName(dataset.getDatasetName()), true);
+        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
+        FilesIndexDescription filesIndexDescription = new FilesIndexDescription();
+        ILocalResourceMetadata localResourceMetadata = new ExternalBTreeLocalResourceMetadata(
+                filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS, filesIndexDescription.FILES_INDEX_COMP_FACTORIES,
+                new int[] { 0 }, false, dataset.getDatasetId(), mergePolicyFactory, mergePolicyFactoryProperties);
+        PersistentLocalResourceFactoryProvider localResourceFactoryProvider = new PersistentLocalResourceFactoryProvider(
+                localResourceMetadata, LocalResource.ExternalBTreeResource);
+        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
+                mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+        ExternalFilesIndexOperatorDescriptor externalFilesOp = new ExternalFilesIndexOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                secondaryFileSplitProvider, indexDataflowHelperFactory, localResourceFactoryProvider,
+                externalFilesSnapshot, createIndex);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, externalFilesOp,
+                secondarySplitsAndConstraint.second);
+        spec.addRoot(externalFilesOp);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    /**
+     * This method creates an indexing operator that indexes records in HDFS.
+     *
+     * @param jobSpec
+     * @param itemType
+     * @param dataset
+     * @param files
+     * @param indexerDesc
+     * @return
+     * @throws Exception
+     */
+    private static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> getExternalDataIndexingOperator(
+            JobSpecification jobSpec, IAType itemType, Dataset dataset, List<ExternalFile> files,
+            RecordDescriptor indexerDesc, AqlMetadataProvider metadataProvider) throws Exception {
+        ExternalDatasetDetails externalDatasetDetails = (ExternalDatasetDetails) dataset.getDatasetDetails();
+        Map<String, String> configuration = externalDatasetDetails.getProperties();
+        IAdapterFactory adapterFactory = AdapterFactoryProvider.getAdapterFactory(externalDatasetDetails.getAdapter(),
+                configuration, (ARecordType) itemType, files, true);
+        return new Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint>(
+                new ExternalDataScanOperatorDescriptor(jobSpec, indexerDesc, adapterFactory),
+                adapterFactory.getPartitionConstraint());
+    }
+
+    public static Pair<ExternalDataScanOperatorDescriptor, AlgebricksPartitionConstraint> createExternalIndexingOp(
+            JobSpecification spec, AqlMetadataProvider metadataProvider, Dataset dataset, ARecordType itemType,
+            RecordDescriptor indexerDesc, List<ExternalFile> files) throws Exception {
+        if (files == null) {
+            files = MetadataManager.INSTANCE.getDatasetExternalFiles(metadataProvider.getMetadataTxnContext(), dataset);
+        }
+        return getExternalDataIndexingOperator(spec, itemType, dataset, files, indexerDesc, metadataProvider);
+    }
+
+    /**
+     * At the end of this method, we expect to have 4 sets as follows:
+     * metadataFiles should contain only the files that are appended in their original state
+     * addedFiles should contain new files that have numbers assigned starting after the max original file number
+     * deletedFiles should contain files that are no longer present in the file system
+     * appendedFiles should contain the new file information for existing files that were appended to
+     * The method returns true when there is no delta (i.e., the dataset is up to date)
+     *
+     * @param dataset
+     * @param metadataFiles
+     * @param addedFiles
+     * @param deletedFiles
+     * @param appendedFiles
+     * @return
+     * @throws MetadataException
+     * @throws AlgebricksException
+     */
+    public static boolean isDatasetUptodate(Dataset dataset, List<ExternalFile> metadataFiles,
+            List<ExternalFile> addedFiles, List<ExternalFile> deletedFiles, List<ExternalFile> appendedFiles)
+                    throws MetadataException, AlgebricksException {
+        boolean uptodate = true;
+        int newFileNumber = metadataFiles.get(metadataFiles.size() - 1).getFileNumber() + 1;
+
+        ArrayList<ExternalFile> fileSystemFiles = getSnapshotFromExternalFileSystem(dataset);
+
+        // Loop over file system files < taking care of added files >
+        for (ExternalFile fileSystemFile : fileSystemFiles) {
+            boolean fileFound = false;
+            Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
+            while (mdFilesIterator.hasNext()) {
+                ExternalFile metadataFile = mdFilesIterator.next();
+                if (fileSystemFile.getFileName().equals(metadataFile.getFileName())) {
+                    // Same file name
+                    if (fileSystemFile.getLastModefiedTime().equals(metadataFile.getLastModefiedTime())) {
+                        // Same timestamp
+                        if (fileSystemFile.getSize() == metadataFile.getSize()) {
+                            // Same size -> no op
+                            mdFilesIterator.remove();
+                            fileFound = true;
+                        } else {
+                            // Different size -> append op
+                            metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
+                            fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_APPEND_OP);
+                            appendedFiles.add(fileSystemFile);
+                            fileFound = true;
+                            uptodate = false;
+                        }
+                    } else {
+                        // Same file name, Different file mod date -> delete and add
+                        metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
+                        deletedFiles
+                                .add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(), 0,
+                                        metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
+                                        metadataFile.getSize(), ExternalFilePendingOp.PENDING_DROP_OP));
+                        fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
+                        fileSystemFile.setFileNumber(newFileNumber);
+                        addedFiles.add(fileSystemFile);
+                        newFileNumber++;
+                        fileFound = true;
+                        uptodate = false;
+                    }
+                }
+                if (fileFound) {
+                    break;
+                }
+            }
+            if (!fileFound) {
+                // File not stored previously in metadata -> pending add op
+                fileSystemFile.setPendingOp(ExternalFilePendingOp.PENDING_ADD_OP);
+                fileSystemFile.setFileNumber(newFileNumber);
+                addedFiles.add(fileSystemFile);
+                newFileNumber++;
+                uptodate = false;
+            }
+        }
+
+        // Done with files from external file system -> metadata files now contain both deleted files and appended ones
+        // first, correct number assignment to deleted and updated files
+        for (ExternalFile deletedFile : deletedFiles) {
+            deletedFile.setFileNumber(newFileNumber);
+            newFileNumber++;
+        }
+        for (ExternalFile appendedFile : appendedFiles) {
+            appendedFile.setFileNumber(newFileNumber);
+            newFileNumber++;
+        }
+
+        // include the remaining deleted files
+        Iterator<ExternalFile> mdFilesIterator = metadataFiles.iterator();
+        while (mdFilesIterator.hasNext()) {
+            ExternalFile metadataFile = mdFilesIterator.next();
+            if (metadataFile.getPendingOp() == ExternalFilePendingOp.PENDING_NO_OP) {
+                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_DROP_OP);
+                deletedFiles.add(new ExternalFile(metadataFile.getDataverseName(), metadataFile.getDatasetName(),
+                        newFileNumber, metadataFile.getFileName(), metadataFile.getLastModefiedTime(),
+                        metadataFile.getSize(), metadataFile.getPendingOp()));
+                newFileNumber++;
+                uptodate = false;
+            }
+        }
+        return uptodate;
+    }
+
+    public static Dataset createTransactionDataset(Dataset dataset) {
+        ExternalDatasetDetails originalDsd = (ExternalDatasetDetails) dataset.getDatasetDetails();
+        ExternalDatasetDetails dsd = new ExternalDatasetDetails(originalDsd.getAdapter(), originalDsd.getProperties(),
+                originalDsd.getTimestamp(), ExternalDatasetTransactionState.BEGIN);
+        Dataset transactionDatset = new Dataset(dataset.getDataverseName(), dataset.getDatasetName(),
+                dataset.getItemTypeDataverseName(), dataset.getItemTypeName(), dataset.getNodeGroupName(),
+                dataset.getCompactionPolicy(), dataset.getCompactionPolicyProperties(), dsd, dataset.getHints(),
+                DatasetType.EXTERNAL, dataset.getDatasetId(), dataset.getPendingOp());
+        return transactionDatset;
+    }
+
+    public static boolean isFileIndex(Index index) {
+        return (index.getIndexName().equals(getFilesIndexName(index.getDatasetName())));
+    }
+
+    public static JobSpecification buildDropFilesIndexJobSpec(CompiledIndexDropStatement indexDropStmt,
+            AqlMetadataProvider metadataProvider, Dataset dataset) throws AlgebricksException, MetadataException {
+        String dataverseName = indexDropStmt.getDataverseName() == null ? metadataProvider.getDefaultDataverseName()
+                : indexDropStmt.getDataverseName();
+        String datasetName = indexDropStmt.getDatasetName();
+        String indexName = indexDropStmt.getIndexName();
+        boolean temp = dataset.getDatasetDetails().isTemp();
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> splitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataverseName, datasetName, indexName, true);
+        AsterixStorageProperties storageProperties = AsterixAppContextInfo.getInstance().getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+        IndexDropOperatorDescriptor btreeDrop = new IndexDropOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                splitsAndConstraint.first,
+                new LSMBTreeDataflowHelperFactory(new AsterixVirtualBufferCacheProvider(dataset.getDatasetId()),
+                        compactionInfo.first, compactionInfo.second,
+                        new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                        AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                        storageProperties.getBloomFilterFalsePositiveRate(), false, null, null, null, null, !temp));
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, btreeDrop,
+                splitsAndConstraint.second);
+        spec.addRoot(btreeDrop);
+
+        return spec;
+    }
+
+    public static JobSpecification buildFilesIndexUpdateOp(Dataset ds, List<ExternalFile> metadataFiles,
+            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
+            AqlMetadataProvider metadataProvider) throws MetadataException, AlgebricksException {
+        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
+        for (ExternalFile file : metadataFiles) {
+            if (file.getPendingOp() == ExternalFilePendingOp.PENDING_DROP_OP) {
+                files.add(file);
+            } else if (file.getPendingOp() == ExternalFilePendingOp.PENDING_APPEND_OP) {
+                for (ExternalFile appendedFile : appendedFiles) {
+                    if (appendedFile.getFileName().equals(file.getFileName())) {
+                        files.add(new ExternalFile(file.getDataverseName(), file.getDatasetName(), file.getFileNumber(),
+                                file.getFileName(), file.getLastModefiedTime(), appendedFile.getSize(),
+                                ExternalFilePendingOp.PENDING_NO_OP));
+                    }
+                }
+            }
+        }
+        for (ExternalFile file : addedFiles) {
+            files.add(file);
+        }
+        Collections.sort(files);
+        return buildFilesIndexReplicationJobSpec(ds, files, metadataProvider, false);
+    }
+
+    public static JobSpecification buildIndexUpdateOp(Dataset ds, Index index, List<ExternalFile> metadataFiles,
+            List<ExternalFile> deletedFiles, List<ExternalFile> addedFiles, List<ExternalFile> appendedFiles,
+            AqlMetadataProvider metadataProvider) throws AsterixException, AlgebricksException {
+        // Create files list
+        ArrayList<ExternalFile> files = new ArrayList<ExternalFile>();
+
+        for (ExternalFile metadataFile : metadataFiles) {
+            if (metadataFile.getPendingOp() != ExternalFilePendingOp.PENDING_APPEND_OP) {
+                files.add(metadataFile);
+            } else {
+                metadataFile.setPendingOp(ExternalFilePendingOp.PENDING_NO_OP);
+                files.add(metadataFile);
+            }
+        }
+        // add new files
+        for (ExternalFile file : addedFiles) {
+            files.add(file);
+        }
+        // add appended files
+        for (ExternalFile file : appendedFiles) {
+            files.add(file);
+        }
+
+        CompiledCreateIndexStatement ccis = new CompiledCreateIndexStatement(index.getIndexName(),
+                index.getDataverseName(), index.getDatasetName(), index.getKeyFieldNames(), index.getKeyFieldTypes(),
+                index.isEnforcingKeyFileds(), index.getGramLength(), index.getIndexType());
+        return IndexOperations.buildSecondaryIndexLoadingJobSpec(ccis, null, null, metadataProvider, files);
+    }
+
+    public static JobSpecification buildCommitJob(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
+                metadataProvider.getMetadataTxnContext());
+        boolean temp = ds.getDatasetDetails().isTemp();
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                        getFilesIndexName(ds.getDatasetName()), temp);
+        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
+                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
+        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
+
+        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+
+        for (Index index : indexes) {
+            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
+                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
+                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                                index.getIndexName(), temp);
+                if (index.getIndexType() == IndexType.BTREE) {
+                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, spec));
+                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                } else if (index.getIndexType() == IndexType.RTREE) {
+                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
+                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                }
+            }
+        }
+
+        ExternalDatasetIndexesCommitOperatorDescriptor op = new ExternalDatasetIndexesCommitOperatorDescriptor(spec,
+                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
+                rtreeDataflowHelperFactories, rtreeInfos);
+
+        spec.addRoot(op);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
+                filesIndexSplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    private static ExternalBTreeDataflowHelperFactory getFilesIndexDataflowHelperFactory(Dataset ds,
+            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
+            AsterixStorageProperties storageProperties, JobSpecification spec) {
+        return new ExternalBTreeDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+    }
+
+    private static ExternalBTreeWithBuddyDataflowHelperFactory getBTreeDataflowHelperFactory(Dataset ds, Index index,
+            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
+            AsterixStorageProperties storageProperties, JobSpecification spec) {
+        return new ExternalBTreeWithBuddyDataflowHelperFactory(mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeWithBuddyIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+    }
+
+    @SuppressWarnings("rawtypes")
+    private static ExternalRTreeDataflowHelperFactory getRTreeDataflowHelperFactory(Dataset ds, Index index,
+            ILSMMergePolicyFactory mergePolicyFactory, Map<String, String> mergePolicyFactoryProperties,
+            AsterixStorageProperties storageProperties, AqlMetadataProvider metadataProvider, JobSpecification spec)
+                    throws AlgebricksException, AsterixException {
+        int numPrimaryKeys = getRIDSize(ds);
+        List<List<String>> secondaryKeyFields = index.getKeyFieldNames();
+        secondaryKeyFields.size();
+        ARecordType itemType = (ARecordType) metadataProvider.findType(ds.getItemTypeDataverseName(),
+                ds.getItemTypeName());
+        Pair<IAType, Boolean> spatialTypePair = Index.getNonNullableKeyFieldType(secondaryKeyFields.get(0), itemType);
+        IAType spatialType = spatialTypePair.first;
+        if (spatialType == null) {
+            throw new AsterixException("Could not find field " + secondaryKeyFields.get(0) + " in the schema.");
+        }
+        int numDimensions = NonTaggedFormatUtil.getNumDimensions(spatialType.getTypeTag());
+        int numNestedSecondaryKeyFields = numDimensions * 2;
+        IPrimitiveValueProviderFactory[] valueProviderFactories = new IPrimitiveValueProviderFactory[numNestedSecondaryKeyFields];
+        IBinaryComparatorFactory[] secondaryComparatorFactories = new IBinaryComparatorFactory[numNestedSecondaryKeyFields];
+
+        ISerializerDeserializer[] secondaryRecFields = new ISerializerDeserializer[numPrimaryKeys
+                + numNestedSecondaryKeyFields];
+        ITypeTraits[] secondaryTypeTraits = new ITypeTraits[numNestedSecondaryKeyFields + numPrimaryKeys];
+        IAType nestedKeyType = NonTaggedFormatUtil.getNestedSpatialType(spatialType.getTypeTag());
+        ATypeTag keyType = nestedKeyType.getTypeTag();
+
+        keyType = nestedKeyType.getTypeTag();
+        for (int i = 0; i < numNestedSecondaryKeyFields; i++) {
+            ISerializerDeserializer keySerde = AqlSerializerDeserializerProvider.INSTANCE
+                    .getSerializerDeserializer(nestedKeyType);
+            secondaryRecFields[i] = keySerde;
+
+            secondaryComparatorFactories[i] = AqlBinaryComparatorFactoryProvider.INSTANCE
+                    .getBinaryComparatorFactory(nestedKeyType, true);
+            secondaryTypeTraits[i] = AqlTypeTraitProvider.INSTANCE.getTypeTrait(nestedKeyType);
+            valueProviderFactories[i] = AqlPrimitiveValueProviderFactory.INSTANCE;
+        }
+        // Add serializers and comparators for primary index fields.
+        for (int i = 0; i < numPrimaryKeys; i++) {
+            secondaryRecFields[numNestedSecondaryKeyFields + i] = IndexingConstants.getSerializerDeserializer(i);
+            secondaryTypeTraits[numNestedSecondaryKeyFields + i] = IndexingConstants.getTypeTraits(i);
+        }
+        int[] primaryKeyFields = new int[numPrimaryKeys];
+        for (int i = 0; i < primaryKeyFields.length; i++) {
+            primaryKeyFields[i] = i + numNestedSecondaryKeyFields;
+        }
+
+        return new ExternalRTreeDataflowHelperFactory(valueProviderFactories, RTreePolicyType.RTREE,
+                getBuddyBtreeComparatorFactories(), mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(ds.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMRTreeIOOperationCallbackFactory.INSTANCE,
+                AqlMetadataProvider.proposeLinearizer(keyType, secondaryComparatorFactories.length),
+                storageProperties.getBloomFilterFalsePositiveRate(), new int[] { index.getKeyFieldNames().size() },
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(ds), true);
+    }
+
+    public static JobSpecification buildAbortOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+
+        boolean temp = ds.getDatasetDetails().isTemp();
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                        getFilesIndexName(ds.getDatasetName()), temp);
+        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
+                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
+        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
+
+        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+
+        for (Index index : indexes) {
+            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
+                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
+                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                                index.getIndexName(), temp);
+                if (index.getIndexType() == IndexType.BTREE) {
+                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, spec));
+                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                } else if (index.getIndexType() == IndexType.RTREE) {
+                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
+                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                }
+            }
+        }
+
+        ExternalDatasetIndexesAbortOperatorDescriptor op = new ExternalDatasetIndexesAbortOperatorDescriptor(spec,
+                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
+                rtreeDataflowHelperFactories, rtreeInfos);
+
+        spec.addRoot(op);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
+                filesIndexSplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+
+    }
+
+    public static JobSpecification buildRecoverOp(Dataset ds, List<Index> indexes, AqlMetadataProvider metadataProvider)
+            throws AlgebricksException, AsterixException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(ds,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        boolean temp = ds.getDatasetDetails().isTemp();
+
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> filesIndexSplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                        getFilesIndexName(ds.getDatasetName()), temp);
+        IFileSplitProvider filesIndexSplitProvider = filesIndexSplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory filesIndexDataflowHelperFactory = getFilesIndexDataflowHelperFactory(ds,
+                mergePolicyFactory, mergePolicyFactoryProperties, storageProperties, spec);
+        IndexInfoOperatorDescriptor filesIndexInfo = new IndexInfoOperatorDescriptor(filesIndexSplitProvider,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER);
+
+        ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory> btreeDataflowHelperFactories = new ArrayList<ExternalBTreeWithBuddyDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> btreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+        ArrayList<ExternalRTreeDataflowHelperFactory> rtreeDataflowHelperFactories = new ArrayList<ExternalRTreeDataflowHelperFactory>();
+        ArrayList<IndexInfoOperatorDescriptor> rtreeInfos = new ArrayList<IndexInfoOperatorDescriptor>();
+
+        for (Index index : indexes) {
+            if (isValidIndexName(index.getDatasetName(), index.getIndexName())) {
+                Pair<IFileSplitProvider, AlgebricksPartitionConstraint> indexSplitsAndConstraint = metadataProvider
+                        .splitProviderAndPartitionConstraintsForDataset(ds.getDataverseName(), ds.getDatasetName(),
+                                index.getIndexName(), temp);
+                if (index.getIndexType() == IndexType.BTREE) {
+                    btreeDataflowHelperFactories.add(getBTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, spec));
+                    btreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                } else if (index.getIndexType() == IndexType.RTREE) {
+                    rtreeDataflowHelperFactories.add(getRTreeDataflowHelperFactory(ds, index, mergePolicyFactory,
+                            mergePolicyFactoryProperties, storageProperties, metadataProvider, spec));
+                    rtreeInfos.add(new IndexInfoOperatorDescriptor(indexSplitsAndConstraint.first,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                            AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER));
+                }
+            }
+        }
+
+        ExternalDatasetIndexesRecoverOperatorDescriptor op = new ExternalDatasetIndexesRecoverOperatorDescriptor(spec,
+                filesIndexDataflowHelperFactory, filesIndexInfo, btreeDataflowHelperFactories, btreeInfos,
+                rtreeDataflowHelperFactories, rtreeInfos);
+
+        spec.addRoot(op);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, op,
+                filesIndexSplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+
+    public static JobSpecification compactFilesIndexJobSpec(Dataset dataset, AqlMetadataProvider metadataProvider)
+            throws MetadataException, AlgebricksException {
+        JobSpecification spec = JobSpecificationUtils.createJobSpecification();
+        IAsterixPropertiesProvider asterixPropertiesProvider = AsterixAppContextInfo.getInstance();
+        AsterixStorageProperties storageProperties = asterixPropertiesProvider.getStorageProperties();
+        Pair<ILSMMergePolicyFactory, Map<String, String>> compactionInfo = DatasetUtils.getMergePolicyFactory(dataset,
+                metadataProvider.getMetadataTxnContext());
+        ILSMMergePolicyFactory mergePolicyFactory = compactionInfo.first;
+        Map<String, String> mergePolicyFactoryProperties = compactionInfo.second;
+        Pair<IFileSplitProvider, AlgebricksPartitionConstraint> secondarySplitsAndConstraint = metadataProvider
+                .splitProviderAndPartitionConstraintsForFilesIndex(dataset.getDataverseName(), dataset.getDatasetName(),
+                        getFilesIndexName(dataset.getDatasetName()), true);
+        IFileSplitProvider secondaryFileSplitProvider = secondarySplitsAndConstraint.first;
+        ExternalBTreeDataflowHelperFactory indexDataflowHelperFactory = new ExternalBTreeDataflowHelperFactory(
+                mergePolicyFactory, mergePolicyFactoryProperties,
+                new SecondaryIndexOperationTrackerProvider(dataset.getDatasetId()),
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, LSMBTreeIOOperationCallbackFactory.INSTANCE,
+                storageProperties.getBloomFilterFalsePositiveRate(),
+                ExternalDatasetsRegistry.INSTANCE.getDatasetVersion(dataset), true);
+        FilesIndexDescription filesIndexDescription = new FilesIndexDescription();
+        LSMTreeIndexCompactOperatorDescriptor compactOp = new LSMTreeIndexCompactOperatorDescriptor(spec,
+                AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER, AsterixRuntimeComponentsProvider.RUNTIME_PROVIDER,
+                secondaryFileSplitProvider, filesIndexDescription.EXTERNAL_FILE_INDEX_TYPE_TRAITS,
+                filesIndexDescription.FILES_INDEX_COMP_FACTORIES, new int[] { 0 }, indexDataflowHelperFactory,
+                NoOpOperationCallbackFactory.INSTANCE);
+        spec.addRoot(compactOp);
+        AlgebricksPartitionConstraintHelper.setPartitionConstraintInJobSpec(spec, compactOp,
+                secondarySplitsAndConstraint.second);
+        spec.setConnectorPolicyAssignmentPolicy(new ConnectorPolicyAssignmentPolicy());
+        return spec;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
----------------------------------------------------------------------
diff --git a/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java b/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
new file mode 100755
index 0000000..a5654bd
--- /dev/null
+++ b/asterix-app/src/main/java/org/apache/asterix/app/external/ExternalLibraryUtils.java
@@ -0,0 +1,402 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.app.external;
+
+import java.io.File;
+import java.io.FilenameFilter;
+import java.net.URL;
+import java.net.URLClassLoader;
+import java.rmi.RemoteException;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.logging.Level;
+import java.util.logging.Logger;
+
+import javax.xml.bind.JAXBContext;
+import javax.xml.bind.Unmarshaller;
+
+import org.apache.asterix.common.exceptions.ACIDException;
+import org.apache.asterix.common.exceptions.AsterixException;
+import org.apache.asterix.common.functions.FunctionSignature;
+import org.apache.asterix.external.api.IDataSourceAdapter;
+import org.apache.asterix.external.dataset.adapter.AdapterIdentifier;
+import org.apache.asterix.external.library.ExternalLibrary;
+import org.apache.asterix.external.library.ExternalLibraryManager;
+import org.apache.asterix.external.library.LibraryAdapter;
+import org.apache.asterix.external.library.LibraryFunction;
+import org.apache.asterix.metadata.MetadataManager;
+import org.apache.asterix.metadata.MetadataTransactionContext;
+import org.apache.asterix.metadata.api.IMetadataEntity;
+import org.apache.asterix.metadata.entities.DatasourceAdapter;
+import org.apache.asterix.metadata.entities.Dataverse;
+import org.apache.asterix.metadata.entities.Function;
+import org.apache.asterix.metadata.entities.Library;
+import org.apache.asterix.runtime.formats.NonTaggedDataFormat;
+
+public class ExternalLibraryUtils {
+
+    private static final Logger LOGGER = Logger.getLogger(ExternalLibraryUtils.class.getName());
+
+    public static void setUpExternaLibraries(boolean isMetadataNode) throws Exception {
+
+        // start by un-installing removed libraries (Metadata Node only)
+        Map<String, List<String>> uninstalledLibs = null;
+        if (isMetadataNode) {
+            uninstalledLibs = uninstallLibraries();
+        }
+
+        // get the directory of the libraries to be installed
+        File installLibDir = getLibraryInstallDir();
+        // directory exists?
+        if (installLibDir.exists()) {
+            // get the list of files in the directory
+            for (String dataverse : installLibDir.list()) {
+                File dataverseDir = new File(installLibDir, dataverse);
+                String[] libraries = dataverseDir.list();
+                for (String library : libraries) {
+                    // for each file (library), register library
+                    registerLibrary(dataverse, library, isMetadataNode, installLibDir);
+                    // is metadata node?
+                    if (isMetadataNode) {
+                        // get library file
+                        File libraryDir = new File(installLibDir.getAbsolutePath() + File.separator + dataverse
+                                + File.separator + library);
+                        // install if needed (i.e., add the functions, adapters, datasources, parsers to the metadata) <Not required for use>
+                        installLibraryIfNeeded(dataverse, libraryDir, uninstalledLibs);
+                    }
+                }
+            }
+        }
+    }
+
+    /**
+     * Un-installs the libraries that have been marked for removal.
+     * @return a map from dataverse name to the list of libraries that were un-installed.
+     * @throws Exception
+     */
+    private static Map<String, List<String>> uninstallLibraries() throws Exception {
+        Map<String, List<String>> uninstalledLibs = new HashMap<String, List<String>>();
+        // get the directory holding the libraries marked for un-installation
+        File uninstallLibDir = getLibraryUninstallDir();
+        String[] uninstallLibNames;
+        // directory exists?
+        if (uninstallLibDir.exists()) {
+            // list files
+            uninstallLibNames = uninstallLibDir.list();
+            for (String uninstallLibName : uninstallLibNames) {
+                // Get the <dataverse name - library name> pair
+                String[] components = uninstallLibName.split("\\.");
+                String dataverse = components[0];
+                String libName = components[1];
+                // un-install
+                uninstallLibrary(dataverse, libName);
+                // delete the library file
+                new File(uninstallLibDir, uninstallLibName).delete();
+                // add the library to the list of uninstalled libraries
+                List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
+                if (uninstalledLibsInDv == null) {
+                    uninstalledLibsInDv = new ArrayList<String>();
+                    uninstalledLibs.put(dataverse, uninstalledLibsInDv);
+                }
+                uninstalledLibsInDv.add(libName);
+            }
+        }
+        return uninstalledLibs;
+    }
+
+    /**
+     * Remove the library from metadata completely.
+     * TODO Currently, external libraries include only functions and adapters. We need to extend this to include:
+     * 1. external data source
+     * 2. data parser
+     * @param dataverse
+     * @param libraryName
+     * @return true if the library was found and removed, false otherwise
+     * @throws AsterixException
+     * @throws RemoteException
+     * @throws ACIDException
+     */
+    protected static boolean uninstallLibrary(String dataverse, String libraryName)
+            throws AsterixException, RemoteException, ACIDException {
+        MetadataTransactionContext mdTxnCtx = null;
+        try {
+            // begin transaction
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            // make sure dataverse exists
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
+            if (dv == null) {
+                return false;
+            }
+            // make sure library exists
+            Library library = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, dataverse, libraryName);
+            if (library == null) {
+                return false;
+            }
+
+            // get dataverse functions
+            List<Function> functions = MetadataManager.INSTANCE.getDataverseFunctions(mdTxnCtx, dataverse);
+            for (Function function : functions) {
+                // does function belong to library?
+                if (function.getName().startsWith(libraryName + "#")) {
+                    // drop the function
+                    MetadataManager.INSTANCE.dropFunction(mdTxnCtx,
+                            new FunctionSignature(dataverse, function.getName(), function.getArity()));
+                }
+            }
+
+            // get the dataverse adapters
+            List<DatasourceAdapter> adapters = MetadataManager.INSTANCE.getDataverseAdapters(mdTxnCtx, dataverse);
+            for (DatasourceAdapter adapter : adapters) {
+                // does the adapter belong to the library?
+                if (adapter.getAdapterIdentifier().getName().startsWith(libraryName + "#")) {
+                    // remove the adapter <Note: we do not check whether any feeds still use this adapter>
+                    MetadataManager.INSTANCE.dropAdapter(mdTxnCtx, dataverse, adapter.getAdapterIdentifier().getName());
+                }
+            }
+            // drop the library itself
+            MetadataManager.INSTANCE.dropLibrary(mdTxnCtx, dataverse, libraryName);
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+            throw new AsterixException(e);
+        }
+        return true;
+    }
+
+    /**
+     * Each element of a library is installed as part of a transaction. A failure
+     * while installing one element does not affect the installation of other
+     * libraries.
+     */
+    protected static void installLibraryIfNeeded(String dataverse, final File libraryDir,
+            Map<String, List<String>> uninstalledLibs) throws Exception {
+
+        String libraryName = libraryDir.getName().trim();
+        List<String> uninstalledLibsInDv = uninstalledLibs.get(dataverse);
+        // was this library just un-installed?
+        boolean wasUninstalled = uninstalledLibsInDv != null && uninstalledLibsInDv.contains(libraryName);
+        MetadataTransactionContext mdTxnCtx = null;
+        try {
+            mdTxnCtx = MetadataManager.INSTANCE.beginTransaction();
+            Library libraryInMetadata = MetadataManager.INSTANCE.getLibrary(mdTxnCtx, dataverse, libraryName);
+            if (libraryInMetadata != null && !wasUninstalled) {
+                // the library already exists in metadata and was not un-installed, so we return.
+                // Note: the transaction must still be committed here (previously, commit was not called before returning).
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                return;
+            }
+
+            // Add library
+            MetadataManager.INSTANCE.addLibrary(mdTxnCtx, new Library(dataverse, libraryName));
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Added library " + libraryName + " to Metadata");
+            }
+
+            // Get the descriptor
+            String[] libraryDescriptors = libraryDir.list(new FilenameFilter() {
+                @Override
+                public boolean accept(File dir, String name) {
+                    return name.endsWith(".xml");
+                }
+            });
+
+            if (libraryDescriptors.length == 0) {
+                // should be fine: the library was installed but its content was not added to metadata
+                MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+                return;
+            } else if (libraryDescriptors.length > 1) {
+                throw new Exception("More than one library descriptor defined");
+            }
+            // parse the single descriptor only after the descriptor count has been validated
+            ExternalLibrary library = getLibrary(new File(libraryDir + File.separator + libraryDescriptors[0]));
+
+            // Get the dataverse
+            Dataverse dv = MetadataManager.INSTANCE.getDataverse(mdTxnCtx, dataverse);
+            if (dv == null) {
+                MetadataManager.INSTANCE.addDataverse(mdTxnCtx, new Dataverse(dataverse,
+                        NonTaggedDataFormat.NON_TAGGED_DATA_FORMAT, IMetadataEntity.PENDING_NO_OP));
+            }
+            // Add functions
+            if (library.getLibraryFunctions() != null) {
+                for (LibraryFunction function : library.getLibraryFunctions().getLibraryFunction()) {
+                    String[] fargs = function.getArguments().trim().split(",");
+                    List<String> args = new ArrayList<String>();
+                    for (String arg : fargs) {
+                        args.add(arg);
+                    }
+                    Function f = new Function(dataverse, libraryName + "#" + function.getName().trim(), args.size(),
+                            args, function.getReturnType().trim(), function.getDefinition().trim(),
+                            library.getLanguage().trim(), function.getFunctionType().trim());
+                    MetadataManager.INSTANCE.addFunction(mdTxnCtx, f);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Installed function: " + libraryName + "#" + function.getName().trim());
+                    }
+                }
+            }
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Installed functions in library :" + libraryName);
+            }
+
+            // Add adapters
+            if (library.getLibraryAdapters() != null) {
+                for (LibraryAdapter adapter : library.getLibraryAdapters().getLibraryAdapter()) {
+                    String adapterFactoryClass = adapter.getFactoryClass().trim();
+                    String adapterName = libraryName + "#" + adapter.getName().trim();
+                    AdapterIdentifier aid = new AdapterIdentifier(dataverse, adapterName);
+                    DatasourceAdapter dsa = new DatasourceAdapter(aid, adapterFactoryClass,
+                            IDataSourceAdapter.AdapterType.EXTERNAL);
+                    MetadataManager.INSTANCE.addAdapter(mdTxnCtx, dsa);
+                    if (LOGGER.isLoggable(Level.INFO)) {
+                        LOGGER.info("Installed adapter: " + adapterName);
+                    }
+                }
+            }
+
+            if (LOGGER.isLoggable(Level.INFO)) {
+                LOGGER.info("Installed adapters in library :" + libraryName);
+            }
+            MetadataManager.INSTANCE.commitTransaction(mdTxnCtx);
+        } catch (Exception e) {
+            e.printStackTrace();
+            if (LOGGER.isLoggable(Level.SEVERE)) {
+                LOGGER.log(Level.SEVERE, "Exception in installing library " + libraryName, e);
+            }
+            MetadataManager.INSTANCE.abortTransaction(mdTxnCtx);
+        }
+    }
+
+    /**
+     * Register the library class loader with the external library manager
+     * @param dataverse
+     * @param libraryName
+     * @param isMetadataNode
+     * @param installLibDir
+     * @throws Exception
+     */
+    protected static void registerLibrary(String dataverse, String libraryName, boolean isMetadataNode,
+            File installLibDir) throws Exception {
+        // get the class loader
+        ClassLoader classLoader = getLibraryClassLoader(dataverse, libraryName);
+        // register it with the external library manager
+        ExternalLibraryManager.registerLibraryClassLoader(dataverse, libraryName, classLoader);
+    }
+
+    /**
+     * Get the library descriptor from its XML file
+     * @param libraryXMLPath
+     * @return the external library unmarshalled from the descriptor XML file
+     * @throws Exception
+     */
+    private static ExternalLibrary getLibrary(File libraryXMLPath) throws Exception {
+        JAXBContext configCtx = JAXBContext.newInstance(ExternalLibrary.class);
+        Unmarshaller unmarshaller = configCtx.createUnmarshaller();
+        ExternalLibrary library = (ExternalLibrary) unmarshaller.unmarshal(libraryXMLPath);
+        return library;
+    }
+
+    /**
+     * Get the class loader for the library
+     * @param dataverse
+     * @param libraryName
+     * @return a class loader over the library jar and its dependency jars
+     * @throws Exception
+     */
+    private static ClassLoader getLibraryClassLoader(String dataverse, String libraryName) throws Exception {
+        // Get a reference to the library directory
+        File installDir = getLibraryInstallDir();
+        if (LOGGER.isLoggable(Level.INFO)) {
+            LOGGER.info("Installing lirbary " + libraryName + " in dataverse " + dataverse + "."
+                    + " Install Directory: " + installDir.getAbsolutePath());
+        }
+
+        // get a reference to the specific library dir
+        File libDir = new File(
+                installDir.getAbsolutePath() + File.separator + dataverse + File.separator + libraryName);
+        FilenameFilter jarFileFilter = new FilenameFilter() {
+            @Override
+            public boolean accept(File dir, String name) {
+                return name.endsWith(".jar");
+            }
+        };
+
+        // Get the jar file <Allow only a single jar file>
+        String[] jarsInLibDir = libDir.list(jarFileFilter);
+        if (jarsInLibDir.length > 1) {
+            throw new Exception("Incorrect library structure: found multiple library jars");
+        }
+        if (jarsInLibDir.length == 0) {
+            throw new Exception("Incorrect library structure: could not find library jar");
+        }
+
+        File libJar = new File(libDir, jarsInLibDir[0]);
+        // get the jar dependencies
+        File libDependencyDir = new File(libDir.getAbsolutePath() + File.separator + "lib");
+        int numDependencies = 1;
+        String[] libraryDependencies = null;
+        if (libDependencyDir.exists()) {
+            libraryDependencies = libDependencyDir.list(jarFileFilter);
+            numDependencies += libraryDependencies.length;
+        }
+
+        ClassLoader parentClassLoader = ExternalLibraryUtils.class.getClassLoader();
+        URL[] urls = new URL[numDependencies];
+        int count = 0;
+        // get url of library
+        urls[count++] = libJar.toURI().toURL();
+
+        // get urls for dependencies
+        if (libraryDependencies != null && libraryDependencies.length > 0) {
+            for (String dependency : libraryDependencies) {
+                File file = new File(libDependencyDir + File.separator + dependency);
+                urls[count++] = file.toURI().toURL();
+            }
+        }
+
+        if (LOGGER.isLoggable(Level.INFO)) {
+            StringBuilder logMesg = new StringBuilder("Classpath for library " + libraryName + "\n");
+            for (URL url : urls) {
+                logMesg.append(url.getFile() + "\n");
+            }
+            LOGGER.info(logMesg.toString());
+        }
+
+        // create and return the class loader
+        ClassLoader classLoader = new URLClassLoader(urls, parentClassLoader);
+        return classLoader;
+    }
+
+    /**
+     *  @return the directory "$(pwd)/library": This needs to be improved
+     */
+    protected static File getLibraryInstallDir() {
+        String workingDir = System.getProperty("user.dir");
+        return new File(workingDir + File.separator + "library");
+    }
+
+    /**
+     * @return the directory "$(pwd)/uninstall": This needs to be improved
+     */
+    protected static File getLibraryUninstallDir() {
+        String workingDir = System.getProperty("user.dir");
+        return new File(workingDir + File.separator + "uninstall");
+    }
+
+}
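
For reference, a minimal sketch of the on-disk layout that ExternalLibraryUtils appears to expect, inferred
from the code above (the dataverse, library, and jar names below are hypothetical placeholders):

    library/                        <- getLibraryInstallDir(): "$(pwd)/library"
        mydataverse/
            mylib/
                mylib.xml           <- the single JAXB descriptor read by getLibrary()
                mylib.jar           <- exactly one library jar
                lib/                <- optional directory of dependency jars
                    dependency.jar
    uninstall/                      <- getLibraryUninstallDir(): "$(pwd)/uninstall"
        mydataverse.mylib           <- marker file: un-install library "mylib" from dataverse "mydataverse"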


[16/34] incubator-asterixdb git commit: Enabled Feed Tests and Added External Library tests

Posted by am...@apache.org.
http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
index 06f7e72..e39b507 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/input/stream/provider/TwitterFirehoseInputStreamProvider.java
@@ -30,7 +30,9 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import org.apache.asterix.external.api.IInputStreamProvider;
+import org.apache.asterix.external.dataflow.AbstractFeedDataFlowController;
 import org.apache.asterix.external.input.stream.AInputStream;
+import org.apache.asterix.external.util.FeedLogManager;
 import org.apache.asterix.external.util.TweetGenerator;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 
@@ -80,8 +82,11 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
             return true;
         }
 
-        public void start() {
-            executorService.execute(dataProvider);
+        public synchronized void start() {
+            if (!started) {
+                executorService.execute(dataProvider);
+                started = true;
+            }
         }
 
         @Override
@@ -93,7 +98,6 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
         public int read() throws IOException {
             if (!started) {
                 start();
-                started = true;
             }
             return in.read();
         }
@@ -106,6 +110,18 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
             }
             return in.read(b, off, len);
         }
+
+        @Override
+        public void configure(Map<String, String> configuration) {
+        }
+
+        @Override
+        public void setFeedLogManager(FeedLogManager logManager) {
+        }
+
+        @Override
+        public void setController(AbstractFeedDataFlowController controller) {
+        }
     }
 
     private static class DataProvider implements Runnable {
@@ -170,7 +186,7 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
                     break;
                 } catch (Exception e) {
                     if (LOGGER.isLoggable(Level.WARNING)) {
-                        LOGGER.warning("Exception in adaptor " + e.getMessage());
+                        LOGGER.warning("Exception in adapter " + e.getMessage());
                     }
                 }
             }
@@ -181,4 +197,12 @@ public class TwitterFirehoseInputStreamProvider implements IInputStreamProvider
         }
 
     }
+
+    @Override
+    public void configure(Map<String, String> configuration) {
+    }
+
+    @Override
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
index 3dea50c..df0ddc8 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalFunctionProvider.java
@@ -63,8 +63,8 @@ class ExternalScalarFunction extends ExternalFunction implements IExternalScalar
         try {
             setArguments(tuple);
             evaluate(functionHelper);
-            functionHelper.reset();
             result.set(resultBuffer.getByteArray(), resultBuffer.getStartOffset(), resultBuffer.getLength());
+            functionHelper.reset();
         } catch (Exception e) {
             e.printStackTrace();
             throw new AlgebricksException(e);
@@ -73,6 +73,7 @@ class ExternalScalarFunction extends ExternalFunction implements IExternalScalar
 
     @Override
     public void evaluate(IFunctionHelper argumentProvider) throws Exception {
+        resultBuffer.reset();
         ((IExternalScalarFunction) externalFunction).evaluate(argumentProvider);
         /*
          * Make sure that if "setResult" is not called,

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
index 7ad6cfa..db85e2f 100755
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/library/ExternalLibraryManager.java
@@ -23,8 +23,14 @@ import java.util.Map;
 
 public class ExternalLibraryManager {
 
-    private static Map<String, ClassLoader> libraryClassLoaders = new HashMap<String, ClassLoader>();
+    private static final Map<String, ClassLoader> libraryClassLoaders = new HashMap<String, ClassLoader>();
 
+    /**
+     * Register the library class loader with the external library manager
+     * @param dataverseName
+     * @param libraryName
+     * @param classLoader
+     */
     public static void registerLibraryClassLoader(String dataverseName, String libraryName, ClassLoader classLoader) {
         String key = getKey(dataverseName, libraryName);
         synchronized (libraryClassLoaders) {

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java
index a929eec..7e28c35 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedCollectOperatorDescriptor.java
@@ -23,11 +23,11 @@ import java.util.logging.Level;
 import java.util.logging.Logger;
 
 import org.apache.asterix.common.api.IAsterixAppRuntimeContext;
+import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
 import org.apache.asterix.external.feed.api.IFeedManager;
+import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
 import org.apache.asterix.external.feed.api.IFeedSubscriptionManager;
 import org.apache.asterix.external.feed.api.ISubscribableRuntime;
-import org.apache.asterix.external.feed.api.IFeedLifecycleListener.ConnectionLocation;
-import org.apache.asterix.external.feed.api.IFeedRuntime.FeedRuntimeType;
 import org.apache.asterix.external.feed.management.FeedConnectionId;
 import org.apache.asterix.external.feed.management.FeedId;
 import org.apache.asterix.external.feed.runtime.IngestionRuntime;
@@ -151,7 +151,7 @@ public class FeedCollectOperatorDescriptor extends AbstractSingleActivityOperato
         ISubscribableRuntime ingestionRuntime = subscriptionManager.getSubscribableRuntime(subscribableRuntimeId);
         while (ingestionRuntime == null && waitCycleCount < 10) {
             try {
-                Thread.sleep(2000);
+                Thread.sleep(3000);
                 waitCycleCount++;
                 if (LOGGER.isLoggable(Level.INFO)) {
                     LOGGER.info("waiting to obtain ingestion runtime for subscription " + subscribableRuntimeId);

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
index 20c11b3..d41179f 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaComputeNodePushable.java
@@ -36,12 +36,14 @@ import org.apache.asterix.external.feed.runtime.FeedRuntime;
 import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
 import org.apache.asterix.external.feed.runtime.SubscribableFeedRuntimeId;
 import org.apache.asterix.external.feed.runtime.SubscribableRuntime;
+import org.apache.asterix.external.util.FeedUtils;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.IActivity;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
 import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.io.MessagingFrameTupleAppender;
 import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
 
 /*
@@ -91,6 +93,8 @@ public class FeedMetaComputeNodePushable extends AbstractUnaryInputUnaryOutputOp
 
     private FeedRuntimeInputHandler inputSideHandler;
 
+    private ByteBuffer message = ByteBuffer.allocate(MessagingFrameTupleAppender.MAX_MESSAGE_SIZE);
+
     public FeedMetaComputeNodePushable(IHyracksTaskContext ctx, IRecordDescriptorProvider recordDescProvider,
             int partition, int nPartitions, IOperatorDescriptor coreOperator, FeedConnectionId feedConnectionId,
             Map<String, String> feedPolicyProperties, String operationId) throws HyracksDataException {
@@ -103,6 +107,7 @@ public class FeedMetaComputeNodePushable extends AbstractUnaryInputUnaryOutputOp
         this.connectionId = feedConnectionId;
         this.feedManager = (IFeedManager) ((IAsterixAppRuntimeContext) ctx.getJobletContext().getApplicationContext()
                 .getApplicationObject()).getFeedManager();
+        ctx.setSharedObject(message);
     }
 
     @Override
@@ -126,7 +131,8 @@ public class FeedMetaComputeNodePushable extends AbstractUnaryInputUnaryOutputOp
     private void initializeNewFeedRuntime(FeedRuntimeId runtimeId) throws Exception {
         this.fta = new FrameTupleAccessor(recordDesc);
         this.inputSideHandler = new FeedRuntimeInputHandler(ctx, connectionId, runtimeId, coreOperator,
-                policyEnforcer.getFeedPolicyAccessor(), true, fta, recordDesc, feedManager, nPartitions);
+                policyEnforcer.getFeedPolicyAccessor(), policyEnforcer.getFeedPolicyAccessor().bufferingEnabled(), fta,
+                recordDesc, feedManager, nPartitions);
 
         DistributeFeedFrameWriter distributeWriter = new DistributeFeedFrameWriter(ctx, connectionId.getFeedId(),
                 writer, runtimeType, partition, new FrameTupleAccessor(recordDesc), feedManager);
@@ -157,6 +163,7 @@ public class FeedMetaComputeNodePushable extends AbstractUnaryInputUnaryOutputOp
     @Override
     public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
         try {
+            FeedUtils.processFeedMessage(buffer, message, fta);
             inputSideHandler.nextFrame(buffer);
         } catch (Exception e) {
             e.printStackTrace();

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
index c596671..018aeaa 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/operators/FeedMetaStoreNodePushable.java
@@ -33,7 +33,7 @@ import org.apache.asterix.external.feed.management.FeedConnectionId;
 import org.apache.asterix.external.feed.policy.FeedPolicyEnforcer;
 import org.apache.asterix.external.feed.runtime.FeedRuntime;
 import org.apache.asterix.external.feed.runtime.FeedRuntimeId;
-import org.apache.hyracks.api.comm.FrameHelper;
+import org.apache.asterix.external.util.FeedUtils;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.dataflow.IActivity;
 import org.apache.hyracks.api.dataflow.IOperatorDescriptor;
@@ -41,7 +41,6 @@ import org.apache.hyracks.api.dataflow.value.IRecordDescriptorProvider;
 import org.apache.hyracks.api.exceptions.HyracksDataException;
 import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
 import org.apache.hyracks.dataflow.common.io.MessagingFrameTupleAppender;
-import org.apache.hyracks.dataflow.common.util.IntSerDeUtils;
 import org.apache.hyracks.dataflow.std.base.AbstractUnaryInputUnaryOutputOperatorNodePushable;
 
 public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOperatorNodePushable {
@@ -119,7 +118,6 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
             } else {
                 reviveOldFeedRuntime(runtimeId);
             }
-
             coreOperator.open();
         } catch (Exception e) {
             LOGGER.log(Level.WARNING, "Failed to open feed store operator", e);
@@ -167,7 +165,7 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
     @Override
     public void nextFrame(ByteBuffer buffer) throws HyracksDataException {
         try {
-            processFeedMessage(buffer);
+            FeedUtils.processFeedMessage(buffer, message, fta);
             inputSideHandler.nextFrame(buffer);
         } catch (Exception e) {
             e.printStackTrace();
@@ -175,18 +173,6 @@ public class FeedMetaStoreNodePushable extends AbstractUnaryInputUnaryOutputOper
         }
     }
 
-    private void processFeedMessage(ByteBuffer buffer) {
-        // read the message and reduce the number of tuples
-        fta.reset(buffer);
-        int tc = fta.getTupleCount() - 1;
-        int offset = fta.getTupleStartOffset(tc);
-        int len = fta.getTupleLength(tc);
-        message.clear();
-        message.put(buffer.array(), offset, len);
-        message.flip();
-        IntSerDeUtils.putInt(buffer.array(), FrameHelper.getTupleCountOffset(buffer.capacity()), tc);
-    }
-
     @Override
     public void fail() throws HyracksDataException {
         if (LOGGER.isLoggable(Level.WARNING)) {
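
The processFeedMessage logic deleted above is now invoked through FeedUtils.processFeedMessage(buffer, message, fta),
which is also called from FeedMetaComputeNodePushable. A plausible shape of that shared helper, sketched from the
removed method (FeedUtils itself is not shown in this patch, so the exact signature is an assumption):

    public static void processFeedMessage(ByteBuffer buffer, ByteBuffer message, FrameTupleAccessor fta) {
        // copy the trailing message tuple into the shared message buffer
        fta.reset(buffer);
        int tc = fta.getTupleCount() - 1;
        int offset = fta.getTupleStartOffset(tc);
        int len = fta.getTupleLength(tc);
        message.clear();
        message.put(buffer.array(), offset, len);
        message.flip();
        // reduce the frame's tuple count so downstream operators do not see the message tuple
        IntSerDeUtils.putInt(buffer.array(), FrameHelper.getTupleCountOffset(buffer.capacity()), tc);
    }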

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
index 93aa18b..60c80f1 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/parser/ADMDataParser.java
@@ -265,7 +265,8 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
                 break;
             }
             case AdmLexer.TOKEN_INT_LITERAL: {
-                // For an INT value without any suffix, we return it as INT64 type value since it is the default integer type.
+                // For an INT value without any suffix, we return it as INT64 type value since it is
+                // the default integer type.
                 parseAndCastNumeric(ATypeTag.INT64, objectType, out);
                 break;
             }
@@ -506,7 +507,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             } else {
                 return null;
             }
-            //            return ATypeHierarchy.canPromote(expectedTypeTag, typeTag) ? typeTag : null;
+            // return ATypeHierarchy.canPromote(expectedTypeTag, typeTag) ? typeTag : null;
         } else { // union
             List<IAType> unionList = ((AUnionType) aObjectType).getUnionList();
             for (IAType t : unionList) {
@@ -531,7 +532,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
 
         BitSet nulls = null;
         if (recType != null) {
-            //TODO: use BitSet Pool
+            // TODO: use BitSet Pool
             nulls = new BitSet(recType.getFieldNames().length);
             recBuilder.reset(recType);
         } else {
@@ -569,7 +570,8 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
                                 admLexer.getLastTokenImage().length() - 1);
                         fieldId = recBuilder.getFieldId(fldName);
                         if (fieldId < 0 && !recType.isOpen()) {
-                            throw new ParseException("This record is closed, you can not add extra fields !!");
+                            throw new ParseException(
+                                    "This record is closed, you can not add extra fields! new field name: " + fldName);
                         } else if (fieldId < 0 && recType.isOpen()) {
                             aStringFieldName.setValue(admLexer.getLastTokenImage().substring(1,
                                     admLexer.getLastTokenImage().length() - 1));
@@ -895,7 +897,8 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
             throw new ParseException(mismatchErrorMessage + objectType.getTypeName() + mismatchErrorMessage2 + typeTag);
         }
 
-        // If two type tags are not the same, either we try to promote or demote source type to the target type
+        // If two type tags are not the same, either we try to promote or demote source type to the
+        // target type
         if (targetTypeTag != typeTag) {
             if (ATypeHierarchy.canPromote(typeTag, targetTypeTag)) {
                 // can promote typeTag to targetTypeTag
@@ -907,7 +910,7 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
                 promoteComputer.convertType(castBuffer.getByteArray(), castBuffer.getStartOffset() + 1,
                         castBuffer.getLength() - 1, out);
             } else if (ATypeHierarchy.canDemote(typeTag, targetTypeTag)) {
-                //can demote source type to the target type
+                // can demote source type to the target type
                 ITypeConvertComputer demoteComputer = ATypeHierarchy.getTypeDemoteComputer(typeTag, targetTypeTag);
                 if (demoteComputer == null) {
                     throw new ParseException("Can't cast the " + typeTag + " type to the " + targetTypeTag + " type.");
@@ -942,7 +945,8 @@ public class ADMDataParser extends AbstractDataParser implements IStreamDataPars
                         if (targetTypeTag != typeTag) {
                             ITypeConvertComputer promoteComputer = ATypeHierarchy.getTypePromoteComputer(typeTag,
                                     targetTypeTag);
-                            // the availability if the promote computer should be consistent with the availability of a target type
+                            // the availability if the promote computer should be consistent with
+                            // the availability of a target type
                             assert promoteComputer != null;
                             // do the promotion; note that the type tag field should be skipped
                             promoteComputer.convertType(castBuffer.getByteArray(), castBuffer.getStartOffset() + 1,

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
index c5b39df..efbc6bf 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/AdapterFactoryProvider.java
@@ -57,6 +57,7 @@ public class AdapterFactoryProvider {
         // Compatability
         adapterFactories.put(ExternalDataConstants.ADAPTER_HDFS_CLASSNAME, GenericAdapterFactory.class);
         adapterFactories.put(ExternalDataConstants.ADAPTER_LOCALFS_CLASSNAME, GenericAdapterFactory.class);
+        adapterFactories.put(ExternalDataConstants.ALIAS_TWITTER_FIREHOSE_ADAPTER, GenericAdapterFactory.class);
         return adapterFactories;
     }
 

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
index dfe7aed..6b4b6ba 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DataflowControllerProvider.java
@@ -28,7 +28,6 @@ import org.apache.asterix.external.api.IInputStreamProvider;
 import org.apache.asterix.external.api.IInputStreamProviderFactory;
 import org.apache.asterix.external.api.IRecordDataParser;
 import org.apache.asterix.external.api.IRecordDataParserFactory;
-import org.apache.asterix.external.api.IRecordFlowController;
 import org.apache.asterix.external.api.IRecordReader;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.api.IStreamDataParser;
@@ -39,10 +38,13 @@ import org.apache.asterix.external.dataflow.FeedStreamDataFlowController;
 import org.apache.asterix.external.dataflow.IndexingDataFlowController;
 import org.apache.asterix.external.dataflow.RecordDataFlowController;
 import org.apache.asterix.external.dataflow.StreamDataFlowController;
+import org.apache.asterix.external.input.stream.AInputStream;
 import org.apache.asterix.external.util.DataflowUtils;
-import org.apache.asterix.external.util.ExternalDataUtils;
+import org.apache.asterix.external.util.FeedLogManager;
+import org.apache.asterix.external.util.FeedUtils;
 import org.apache.asterix.om.types.ARecordType;
 import org.apache.hyracks.api.context.IHyracksTaskContext;
+import org.apache.hyracks.dataflow.std.file.FileSplit;
 
 public class DataflowControllerProvider {
 
@@ -57,52 +59,67 @@ public class DataflowControllerProvider {
      * else
      * |-a. Set stream parser
      * 5. start(writer)
+     * @param feedLogFileSplits
+     * @param isFeed
      */
 
+    // TODO: Instead, use a factory just like data source and data parser.
     @SuppressWarnings({ "rawtypes", "unchecked" })
     public static IDataFlowController getDataflowController(ARecordType recordType, IHyracksTaskContext ctx,
             int partition, IExternalDataSourceFactory dataSourceFactory, IDataParserFactory dataParserFactory,
-            Map<String, String> configuration, boolean indexingOp) throws Exception {
+            Map<String, String> configuration, boolean indexingOp, boolean isFeed, FileSplit[] feedLogFileSplits)
+                    throws Exception {
+        FeedLogManager feedLogManager = null;
+        if (isFeed) {
+            feedLogManager = FeedUtils.getFeedLogManager(ctx, partition, feedLogFileSplits);
+        }
         switch (dataSourceFactory.getDataSourceType()) {
             case RECORDS:
-                IRecordFlowController recordDataFlowController = null;
-                if (indexingOp) {
-                    recordDataFlowController = new IndexingDataFlowController();
-                } else if (ExternalDataUtils.isFeed(configuration)) {
-                    recordDataFlowController = new FeedRecordDataFlowController();
-                } else {
-                    recordDataFlowController = new RecordDataFlowController();
-                }
-                recordDataFlowController.configure(configuration, ctx);
-                recordDataFlowController.setTupleForwarder(DataflowUtils.getTupleForwarder(configuration));
+                IDataFlowController recordDataFlowController = null;
                 IRecordReaderFactory<?> recordReaderFactory = (IRecordReaderFactory<?>) dataSourceFactory;
                 IRecordReader<?> recordReader = recordReaderFactory.createRecordReader(ctx, partition);
+                recordReader.configure(configuration);
                 IRecordDataParserFactory<?> recordParserFactory = (IRecordDataParserFactory<?>) dataParserFactory;
                 IRecordDataParser<?> dataParser = recordParserFactory.createRecordParser(ctx);
                 dataParser.configure(configuration, recordType);
-                recordDataFlowController.setRecordReader(recordReader);
-                recordDataFlowController.setRecordParser(dataParser);
+                if (indexingOp) {
+                    recordDataFlowController = new IndexingDataFlowController(dataParser, recordReader);
+                } else if (isFeed) {
+                    recordDataFlowController = new FeedRecordDataFlowController(feedLogManager, dataParser,
+                            recordReader);
+                } else {
+                    recordDataFlowController = new RecordDataFlowController(dataParser, recordReader);
+                }
+                recordDataFlowController.configure(configuration, ctx);
+                recordDataFlowController
+                        .setTupleForwarder(DataflowUtils.getTupleForwarder(configuration, feedLogManager));
                 return recordDataFlowController;
             case STREAM:
                 IStreamFlowController streamDataFlowController = null;
-                if (ExternalDataUtils.isFeed(configuration)) {
-                    streamDataFlowController = new FeedStreamDataFlowController();
+                if (isFeed) {
+                    streamDataFlowController = new FeedStreamDataFlowController(feedLogManager);
                 } else {
                     streamDataFlowController = new StreamDataFlowController();
                 }
                 streamDataFlowController.configure(configuration, ctx);
-                streamDataFlowController.setTupleForwarder(DataflowUtils.getTupleForwarder(configuration));
+                streamDataFlowController
+                        .setTupleForwarder(DataflowUtils.getTupleForwarder(configuration, feedLogManager));
                 IInputStreamProviderFactory streamProviderFactory = (IInputStreamProviderFactory) dataSourceFactory;
+                streamProviderFactory.configure(configuration);
                 IInputStreamProvider streamProvider = streamProviderFactory.createInputStreamProvider(ctx, partition);
+                streamProvider.setFeedLogManager(feedLogManager);
+                streamProvider.configure(configuration);
                 IStreamDataParserFactory streamParserFactory = (IStreamDataParserFactory) dataParserFactory;
                 streamParserFactory.configure(configuration);
                 IStreamDataParser streamParser = streamParserFactory.createInputStreamParser(ctx, partition);
                 streamParser.configure(configuration, recordType);
-                streamParser.setInputStream(streamProvider.getInputStream());
+                AInputStream inputStream = streamProvider.getInputStream();
+                streamParser.setInputStream(inputStream);
                 streamDataFlowController.setStreamParser(streamParser);
                 return streamDataFlowController;
             default:
                 throw new AsterixException("Unknown data source type: " + dataSourceFactory.getDataSourceType());
         }
     }
+
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
index 745c653..0d65f72 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/provider/DatasourceFactoryProvider.java
@@ -26,11 +26,13 @@ import org.apache.asterix.external.api.IInputStreamProviderFactory;
 import org.apache.asterix.external.api.IRecordReaderFactory;
 import org.apache.asterix.external.input.HDFSDataSourceFactory;
 import org.apache.asterix.external.input.record.reader.couchbase.CouchbaseReaderFactory;
+import org.apache.asterix.external.input.record.reader.stream.EmptyLineSeparatedRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.stream.LineRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.stream.SemiStructuredRecordReaderFactory;
 import org.apache.asterix.external.input.record.reader.twitter.TwitterRecordReaderFactory;
 import org.apache.asterix.external.input.stream.factory.LocalFSInputStreamProviderFactory;
 import org.apache.asterix.external.input.stream.factory.SocketInputStreamProviderFactory;
+import org.apache.asterix.external.input.stream.factory.TwitterFirehoseStreamProviderFactory;
 import org.apache.asterix.external.util.ExternalDataConstants;
 import org.apache.asterix.external.util.ExternalDataUtils;
 
@@ -65,6 +67,9 @@ public class DatasourceFactoryProvider {
                 case ExternalDataConstants.STREAM_SOCKET:
                     streamFactory = new SocketInputStreamProviderFactory();
                     break;
+                case ExternalDataConstants.ALIAS_TWITTER_FIREHOSE_ADAPTER:
+                    streamFactory = new TwitterFirehoseStreamProviderFactory();
+                    break;
                 default:
                     throw new AsterixException("unknown input stream factory");
             }
@@ -101,6 +106,11 @@ public class DatasourceFactoryProvider {
                 case ExternalDataConstants.READER_COUCHBASE:
                     readerFactory = new CouchbaseReaderFactory();
                     break;
+                case ExternalDataConstants.READER_LINE_SEPARATED:
+                    readerFactory = new EmptyLineSeparatedRecordReaderFactory()
+                            .setInputStreamFactoryProvider(DatasourceFactoryProvider.getInputStreamFactory(
+                                    ExternalDataUtils.getRecordReaderStreamName(configuration), configuration));
+                    break;
                 default:
                     throw new AsterixException("unknown record reader factory: " + reader);
             }
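
A hedged sketch of how the new "line-separated" reader could be requested through the configuration map (the enclosing lookup method of DatasourceFactoryProvider is not shown in this hunk; the constant names are those used in the patch, the rest is illustrative):

    Map<String, String> conf = new HashMap<>();
    conf.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_LINE_SEPARATED);
    conf.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.ALIAS_LOCALFS_ADAPTER);
    // With this configuration, the provider is expected to return an
    // EmptyLineSeparatedRecordReaderFactory chained to the localfs input stream factory.
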

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
index e604d42..cab8a69 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/DataflowUtils.java
@@ -43,12 +43,13 @@ public class DataflowUtils {
         }
     }
 
-    public static ITupleForwarder getTupleForwarder(Map<String, String> configuration) throws AsterixException {
+    public static ITupleForwarder getTupleForwarder(Map<String, String> configuration, FeedLogManager feedLogManager)
+            throws AsterixException {
         ITupleForwarder policy = null;
         ITupleForwarder.TupleForwardPolicy policyType = null;
         String propValue = configuration.get(ITupleForwarder.FORWARD_POLICY);
         if (ExternalDataUtils.isFeed(configuration)) {
-            //TODO pass this value in the configuration and avoid this check for feeds
+            // TODO pass this value in the configuration and avoid this check for feeds
             policyType = TupleForwardPolicy.FEED;
         } else if (propValue == null) {
             policyType = TupleForwardPolicy.FRAME_FULL;
@@ -57,7 +58,7 @@ public class DataflowUtils {
         }
         switch (policyType) {
             case FEED:
-                policy = new FeedTupleForwarder();
+                policy = new FeedTupleForwarder(feedLogManager);
                 break;
             case FRAME_FULL:
                 policy = new FrameFullTupleForwarder();
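
The forwarder selection above can then be exercised as below (a minimal sketch; configuration and feedLogManager are assumed to exist):

    ITupleForwarder forwarder = DataflowUtils.getTupleForwarder(configuration, feedLogManager);
    // For a feed this resolves to new FeedTupleForwarder(feedLogManager);
    // otherwise FRAME_FULL is the default unless FORWARD_POLICY is set in the configuration.
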

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
index 7bfe698..035c1c3 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataCompatibilityUtils.java
@@ -48,7 +48,7 @@ public class ExternalDataCompatibilityUtils {
         }
     }
 
-    //TODO:Add remaining aliases
+    // TODO:Add remaining aliases
     public static void addCompatabilityParameters(String adapterName, ARecordType itemType,
             Map<String, String> configuration) throws AsterixException {
         // HDFS
@@ -71,22 +71,29 @@ public class ExternalDataCompatibilityUtils {
         if (adapterName.equals(ExternalDataConstants.ALIAS_LOCALFS_ADAPTER)
                 || adapterName.contains(ExternalDataConstants.ADAPTER_LOCALFS_CLASSNAME)
                 || adapterName.contains(ExternalDataConstants.ALIAS_LOCALFS_PUSH_ADAPTER)) {
-            if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
-                throw new AsterixException("Unspecified format parameter for local file system adapter");
+            if (configuration.get(ExternalDataConstants.KEY_READER) == null) {
+                if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
+                    // If reader is specified, we will use the selected reader. If format is
+                    // specified, we will assign a suitable reader for the format.
+                    // TODO: better error message
+                    throw new AsterixException(
+                            "Unspecified (\"reader\" or \"format\") parameter for local filesystem adapter");
+                }
+                configuration.put(ExternalDataConstants.KEY_READER,
+                        configuration.get(ExternalDataConstants.KEY_FORMAT));
+                configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.ALIAS_LOCALFS_ADAPTER);
             }
-            configuration.put(ExternalDataConstants.KEY_READER, configuration.get(ExternalDataConstants.KEY_FORMAT));
-            configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.ALIAS_LOCALFS_ADAPTER);
         }
 
         // Socket
-        if (adapterName.equalsIgnoreCase(ExternalDataConstants.ALIAS_SOCKET_ADAPTER)) {
+        if (adapterName.equalsIgnoreCase(ExternalDataConstants.ALIAS_SOCKET_ADAPTER)
+                || adapterName.equalsIgnoreCase(ExternalDataConstants.ALIAS_SOCKET_CLIENT_ADAPTER)) {
             if (configuration.get(ExternalDataConstants.KEY_FORMAT) == null) {
                 throw new AsterixException("Unspecified format parameter for socket adapter");
             }
             configuration.put(ExternalDataConstants.KEY_READER, configuration.get(ExternalDataConstants.KEY_FORMAT));
             configuration.put(ExternalDataConstants.KEY_READER_STREAM, ExternalDataConstants.STREAM_SOCKET);
         }
-
         // Twitter (Pull)
         if (adapterName.equals(ExternalDataConstants.ALIAS_TWITTER_PULL_ADAPTER)) {
             configuration.put(ExternalDataConstants.KEY_READER, ExternalDataConstants.READER_TWITTER_PULL);
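
A small sketch of the new defaulting for the local filesystem adapter (itemType stands for an existing ARecordType; the format value "delimited-text" is only an example):

    Map<String, String> conf = new HashMap<>();
    conf.put(ExternalDataConstants.KEY_FORMAT, "delimited-text");
    ExternalDataCompatibilityUtils.addCompatabilityParameters(
            ExternalDataConstants.ALIAS_LOCALFS_ADAPTER, itemType, conf);
    // conf now also carries KEY_READER = "delimited-text" and KEY_READER_STREAM = ALIAS_LOCALFS_ADAPTER,
    // because no explicit reader was provided; an explicit reader would have been left untouched.
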

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
index 9bac07c..4b2826c 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataConstants.java
@@ -118,6 +118,7 @@ public class ExternalDataConstants {
     public static final String READER_DELIMITED = "delimited-text";
     public static final String READER_TWITTER_PUSH = "twitter-push";
     public static final String READER_TWITTER_PULL = "twitter-pull";
+    public static final String READER_LINE_SEPARATED = "line-separated";
 
     public static final String CLUSTER_LOCATIONS = "cluster-locations";
     public static final String SCHEDULER = "hdfs-scheduler";
@@ -204,4 +205,5 @@ public class ExternalDataConstants {
      * Expected parameter values
      */
     public static final String PARAMETER_OF_SIZE_ONE = "Value of size 1";
+    public static final String LARGE_RECORD_ERROR_MESSAGE = "Record is too large";
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
index 7c03e4d..c36b629 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/ExternalDataUtils.java
@@ -93,7 +93,8 @@ public class ExternalDataUtils {
     }
 
     public static boolean isExternal(String aString) {
-        return (aString.contains(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR) && aString.trim().length() > 1);
+        return (aString != null && aString.contains(ExternalDataConstants.EXTERNAL_LIBRARY_SEPARATOR)
+                && aString.trim().length() > 1);
     }
 
     public static ClassLoader getClassLoader(String dataverse, String library) {
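
The added null guard means the check below is now safe (the separator character is whatever EXTERNAL_LIBRARY_SEPARATOR is defined as; "mylib#myparser" is only a hypothetical value):

    ExternalDataUtils.isExternal(null);              // false, no NPE after this change
    ExternalDataUtils.isExternal("mylib#myparser");  // true, assuming '#' is the library separator
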

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
index 72b438d..4737727 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedLogManager.java
@@ -37,7 +37,8 @@ public class FeedLogManager {
         START,      // partition start
         END,        // partition end
         COMMIT,     // a record commit within a partition
-        SNAPSHOT    // an identifier that partitions with identifiers before this one should be ignored
+        SNAPSHOT    // an identifier that partitions with identifiers before this one should be
+                    // ignored
     }
 
     public static final String PROGRESS_LOG_FILE_NAME = "progress.log";
@@ -52,10 +53,15 @@ public class FeedLogManager {
     private BufferedWriter progressLogger;
     private BufferedWriter errorLogger;
     private BufferedWriter recordLogger;
+    private StringBuilder stringBuilder = new StringBuilder();
 
-    public FeedLogManager(File file) {
+    public FeedLogManager(File file) throws IOException {
         this.dir = file.toPath();
         this.completed = new TreeSet<String>();
+        if (!exists()) {
+            create();
+        }
+        open();
     }
 
     public void endPartition() throws IOException {
@@ -124,22 +130,31 @@ public class FeedLogManager {
     }
 
     public void logProgress(String log) throws IOException {
-        progressLogger.write(log);
-        progressLogger.newLine();
+        stringBuilder.setLength(0);
+        stringBuilder.append(log);
+        stringBuilder.append(ExternalDataConstants.LF);
+        progressLogger.write(stringBuilder.toString());
+        progressLogger.flush();
     }
 
     public void logError(String error, Throwable th) throws IOException {
-        errorLogger.append(error);
-        errorLogger.newLine();
-        errorLogger.append(th.toString());
-        errorLogger.newLine();
+        stringBuilder.setLength(0);
+        stringBuilder.append(error);
+        stringBuilder.append(ExternalDataConstants.LF);
+        stringBuilder.append(th.toString());
+        stringBuilder.append(ExternalDataConstants.LF);
+        errorLogger.write(stringBuilder.toString());
+        errorLogger.flush();
     }
 
-    public void logRecord(String record, Exception e) throws IOException {
-        recordLogger.append(record);
-        recordLogger.newLine();
-        recordLogger.append(e.toString());
-        recordLogger.newLine();
+    public void logRecord(String record, String errorMessage) throws IOException {
+        stringBuilder.setLength(0);
+        stringBuilder.append(record);
+        stringBuilder.append(ExternalDataConstants.LF);
+        stringBuilder.append(errorMessage);
+        stringBuilder.append(ExternalDataConstants.LF);
+        recordLogger.write(stringBuilder.toString());
+        recordLogger.flush();
     }
 
     public static String getSplitId(String log) {
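
A brief usage sketch of the reworked FeedLogManager (the log directory and payloads are hypothetical; the constructor now creates and opens the logs itself and may throw IOException):

    FeedLogManager logManager = new FeedLogManager(new File("/feeds/MyDataverse/MyFeed/adapter_0"));
    logManager.logProgress("split:0");                                            // hypothetical progress entry
    logManager.logRecord(badRecord, ExternalDataConstants.LARGE_RECORD_ERROR_MESSAGE);
    logManager.logError("parse failure", cause);                                  // cause is an existing Throwable
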

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
index 224ee31..c128545 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FeedUtils.java
@@ -19,6 +19,8 @@
 package org.apache.asterix.external.util;
 
 import java.io.File;
+import java.io.IOException;
+import java.nio.ByteBuffer;
 import java.util.ArrayList;
 import java.util.List;
 
@@ -29,8 +31,12 @@ import org.apache.hyracks.algebricks.common.constraints.AlgebricksAbsolutePartit
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint;
 import org.apache.hyracks.algebricks.common.constraints.AlgebricksPartitionConstraint.PartitionConstraintType;
 import org.apache.hyracks.algebricks.common.exceptions.AlgebricksException;
+import org.apache.hyracks.api.comm.FrameHelper;
+import org.apache.hyracks.api.context.IHyracksTaskContext;
 import org.apache.hyracks.api.io.FileReference;
 import org.apache.hyracks.api.io.IIOManager;
+import org.apache.hyracks.dataflow.common.comm.io.FrameTupleAccessor;
+import org.apache.hyracks.dataflow.common.util.IntSerDeUtils;
 import org.apache.hyracks.dataflow.std.file.FileSplit;
 
 public class FeedUtils {
@@ -38,13 +44,27 @@ public class FeedUtils {
         return dataverseName + File.separator + feedName;
     }
 
+    public static FileSplit[] splitsForAdapter(String dataverseName, String feedName, String nodeName, int partition) {
+        File relPathFile = new File(prepareDataverseFeedName(dataverseName, feedName));
+        String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
+        ClusterPartition nodePartition = AsterixClusterProperties.INSTANCE.getNodePartitions(nodeName)[0];
+        String storagePartitionPath = StoragePathUtil.prepareStoragePartitionPath(storageDirName,
+                nodePartition.getPartitionId());
+        // format: 'storage dir name'/partition_#/dataverse/feed/adapter_#
+        File f = new File(storagePartitionPath + File.separator + relPathFile + File.separator
+                + StoragePathUtil.ADAPTER_INSTANCE_PREFIX + partition);
+        return new FileSplit[] { StoragePathUtil.getFileSplitForClusterPartition(nodePartition, f) };
+    }
+
     public static FileSplit[] splitsForAdapter(String dataverseName, String feedName,
             AlgebricksPartitionConstraint partitionConstraints) throws Exception {
         File relPathFile = new File(prepareDataverseFeedName(dataverseName, feedName));
+        String[] locations = null;
         if (partitionConstraints.getPartitionConstraintType() == PartitionConstraintType.COUNT) {
             throw new AlgebricksException("Can't create file splits for adapter with count partitioning constraints");
+        } else {
+            locations = ((AlgebricksAbsolutePartitionConstraint) partitionConstraints).getLocations();
         }
-        String[] locations = ((AlgebricksAbsolutePartitionConstraint) partitionConstraints).getLocations();
         List<FileSplit> splits = new ArrayList<FileSplit>();
         String storageDirName = AsterixClusterProperties.INSTANCE.getStorageDirectoryName();
         int i = 0;
@@ -66,4 +86,22 @@ public class FeedUtils {
         return ioManager.getAbsoluteFileRef(ioDeviceId, relativePath);
     }
 
+    public static FeedLogManager getFeedLogManager(IHyracksTaskContext ctx, int partition,
+            FileSplit[] feedLogFileSplits) throws IOException {
+        return new FeedLogManager(
+                FeedUtils.getAbsoluteFileRef(feedLogFileSplits[partition].getLocalFile().getFile().getPath(),
+                        feedLogFileSplits[partition].getIODeviceId(), ctx.getIOManager()).getFile());
+    }
+
+    public static void processFeedMessage(ByteBuffer input, ByteBuffer message, FrameTupleAccessor fta) {
+        // read the message and reduce the number of tuples
+        fta.reset(input);
+        int tc = fta.getTupleCount() - 1;
+        int offset = fta.getTupleStartOffset(tc);
+        int len = fta.getTupleLength(tc);
+        message.clear();
+        message.put(input.array(), offset, len);
+        message.flip();
+        IntSerDeUtils.putInt(input.array(), FrameHelper.getTupleCountOffset(input.capacity()), tc);
+    }
 }

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
index 631eef4..2e3b8ec 100644
--- a/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
+++ b/asterix-external-data/src/main/java/org/apache/asterix/external/util/FileSystemWatcher.java
@@ -47,23 +47,25 @@ public class FileSystemWatcher {
     private final LinkedList<File> files = new LinkedList<File>();
     private Iterator<File> it;
     private final String expression;
-    private final FeedLogManager logManager;
+    private FeedLogManager logManager;
     private final Path path;
     private final boolean isFeed;
     private boolean done;
     private File current;
     private AbstractFeedDataFlowController controller;
 
-    public FileSystemWatcher(FeedLogManager logManager, Path inputResource, String expression, boolean isFeed)
-            throws IOException {
+    public FileSystemWatcher(Path inputResource, String expression, boolean isFeed) throws IOException {
         this.watcher = isFeed ? FileSystems.getDefault().newWatchService() : null;
         this.keys = isFeed ? new HashMap<WatchKey, Path>() : null;
-        this.logManager = logManager;
         this.expression = expression;
         this.path = inputResource;
         this.isFeed = isFeed;
     }
 
+    public void setFeedLogManager(FeedLogManager feedLogManager) {
+        this.logManager = feedLogManager;
+    }
+
     public void init() throws IOException {
         LinkedList<Path> dirs = null;
         dirs = new LinkedList<Path>();
@@ -91,13 +93,6 @@ public class FileSystemWatcher {
         if (logManager == null) {
             return;
         }
-        if (logManager.exists()) {
-            logManager.open();
-        } else {
-            logManager.create();
-            logManager.open();
-            return;
-        }
         /*
          * Done processing the progress log file. We now have:
          * the files that were completed.
@@ -210,6 +205,9 @@ public class FileSystemWatcher {
             return false;
         }
         files.clear();
+        if (keys.isEmpty()) {
+            return false;
+        }
         // Read new Events (Polling first to add all available files)
         WatchKey key;
         key = watcher.poll();
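
With the log manager now injected separately, a watcher is set up roughly as follows (path and filter expression are hypothetical; leaving the log manager unset simply skips the recovery bookkeeping, per the null check above):

    FileSystemWatcher watcher = new FileSystemWatcher(Paths.get("/data/feed-input"), "*.adm", true);
    watcher.setFeedLogManager(logManager);   // optional for non-recovering use
    watcher.init();
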

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableCharArrayString.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableCharArrayString.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableCharArrayString.java
new file mode 100644
index 0000000..6722a83
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableCharArrayString.java
@@ -0,0 +1,357 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+public class AMutableCharArrayString implements Comparable<AMutableCharArrayString>, CharSequence {
+    private char[] value;
+    private int length;
+    private int increment = 64;
+
+    public AMutableCharArrayString(String str) {
+        this.value = str.toCharArray();
+        this.length = value.length;
+    }
+
+    public void decrementLength() {
+        length--;
+    }
+
+    public AMutableCharArrayString() {
+        length = 0;
+        value = new char[increment];
+    }
+
+    @Override
+    public char charAt(int i) {
+        return value[i];
+    }
+
+    @Override
+    public String toString() {
+        return String.valueOf(value, 0, length);
+    }
+
+    public AMutableCharArrayString(char[] value, int length) {
+        this.value = value;
+        this.length = length;
+    }
+
+    public AMutableCharArrayString(AMutableCharArrayString aMutableCharArrayString) {
+        this.value = new char[aMutableCharArrayString.length];
+        setValue(aMutableCharArrayString);
+    }
+
+    public AMutableCharArrayString(int initialSize) {
+        this.value = new char[initialSize];
+        this.length = 0;
+    }
+
+    private void expand() {
+        char[] tmpValue = new char[length + increment];
+        System.arraycopy(value, 0, tmpValue, 0, length);
+        value = tmpValue;
+    }
+
+    private void copyAndExpand(int newSize) {
+        char[] tmpValue = new char[newSize];
+        System.arraycopy(value, 0, tmpValue, 0, length);
+        value = tmpValue;
+    }
+
+    public void appendChar(char aChar) {
+        if (length == value.length) {
+            expand();
+        }
+        value[length] = aChar;
+        length++;
+    }
+
+    public void erase(int position) {
+        if (position != length - 1) {
+            System.arraycopy(value, position + 1, value, position, length - (position + 1));
+        }
+        length--;
+    }
+
+    public void setLength(int l) {
+        this.length = l;
+    }
+
+    public void setValue(AMutableCharArrayString otherString) {
+        if (otherString.length > value.length) {
+            // need to reallocate
+            value = new char[otherString.length];
+        }
+        System.arraycopy(otherString.value, 0, value, 0, otherString.length);
+        this.length = otherString.length;
+    }
+
+    public void setValue(String format) {
+        reset();
+        appendString(format);
+    }
+
+    public void reset() {
+        this.length = 0;
+    }
+
+    public void setValue(AMutableCharArrayString otherString, int length) {
+        if (length > value.length) {
+            // need to reallocate
+            value = new char[length];
+        }
+        System.arraycopy(otherString.value, 0, value, 0, length);
+        this.length = length;
+    }
+
+    public void setValue(String otherString, int length) {
+        if (length > value.length) {
+            // need to reallocate
+            value = new char[length];
+        }
+        otherString.getChars(0, otherString.length(), value, 0);
+        this.length = length;
+    }
+
+    public void copyValue(char[] value, int length) {
+        if (length > this.value.length) {
+            // need to reallocate
+            this.value = new char[length];
+        }
+        System.arraycopy(value, 0, this.value, 0, length);
+        this.length = length;
+    }
+
+    public void setString(char[] value, int length) {
+        this.value = value;
+        this.length = length;
+    }
+
+    public void setChar(int i, char ch) {
+        value[i] = ch;
+    }
+
+    public void incrementLength() {
+        if (value.length == length) {
+            expand();
+        }
+        length++;
+    }
+
+    public boolean isEqualsIgnoreCaseLower(char[] compareTo) {
+        if (length == compareTo.length) {
+            for (int i = 0; i < length; i++) {
+                if (compareTo[i] != Character.toLowerCase(value[i])) {
+                    return false;
+                }
+            }
+            return true;
+        }
+        return false;
+    }
+
+    public void appendString(String aString) {
+        if (value.length - length < aString.length()) {
+            copyAndExpand(value.length + aString.length());
+        }
+        aString.getChars(0, aString.length(), value, length);
+        length += aString.length();
+    }
+
+    @Override
+    public boolean equals(Object o) {
+        if (o instanceof AMutableCharArrayString) {
+            AMutableCharArrayString s = (AMutableCharArrayString) o;
+            if (length == s.length) {
+                for (int i = 0; i < length; i++) {
+                    if (value[i] != s.value[i]) {
+                        return false;
+                    }
+                }
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public boolean equalsIgnoreCase(Object o) {
+        if (o instanceof AMutableCharArrayString) {
+            AMutableCharArrayString s = (AMutableCharArrayString) o;
+            if (length == s.length) {
+                for (int i = 0; i < length; i++) {
+                    if (Character.toLowerCase(value[i]) != Character.toLowerCase(s.value[i])) {
+                        return false;
+                    }
+                }
+                return true;
+            }
+        }
+        return false;
+    }
+
+    public boolean equalsString(String aString) {
+        if (length == aString.length()) {
+            for (int i = 0; i < length; i++) {
+                if (value[i] != aString.charAt(i)) {
+                    return false;
+                }
+            }
+            return true;
+        }
+        return false;
+    }
+
+    public void erase(int position, int length) {
+        if (length + position >= this.length) {
+            this.length -= length;
+        } else {
+            System.arraycopy(value, position + length, value, position, this.length - (position + length));
+            this.length -= length;
+        }
+    }
+
+    public String substr(int i, int len) {
+        return String.copyValueOf(value, i, len);
+    }
+
+    public int firstNonDigitChar() {
+        for (int i = 0; i < length; i++) {
+            if (!Character.isDigit(value[i])) {
+                return i;
+            }
+        }
+        return -1;
+    }
+
+    public int fistNonDoubleDigitChar() {
+        boolean inFraction = false;
+        boolean prevCharIsPoint = false;
+        for (int i = 0; i < length; i++) {
+            if (!Character.isDigit(value[i])) {
+                if (inFraction) {
+                    if (prevCharIsPoint) {
+                        return i - 1;
+                    } else {
+                        return i;
+                    }
+                } else {
+                    if (value[i] == '.') {
+                        inFraction = true;
+                        prevCharIsPoint = true;
+                    }
+                }
+            } else {
+                prevCharIsPoint = false;
+            }
+        }
+        return -1;
+    }
+
+    @Override
+    public int compareTo(AMutableCharArrayString o) {
+        return toString().compareTo(o.toString());
+    }
+
+    public int compareTo(String o) {
+        return toString().compareTo(o);
+    }
+
+    public int compareToIgnoreCase(AMutableCharArrayString o) {
+        return toString().compareToIgnoreCase(o.toString());
+    }
+
+    public void appendString(AMutableCharArrayString aString) {
+        if (value.length - length < aString.length()) {
+            copyAndExpand(value.length + aString.length());
+        }
+        System.arraycopy(aString.value, 0, value, length, aString.length);
+        length += aString.length();
+    }
+
+    @Override
+    public int length() {
+        return length;
+    }
+
+    public int size() {
+        return length;
+    }
+
+    @Override
+    public CharSequence subSequence(int start, int end) {
+        return substr(start, end - start);
+    }
+
+    public int firstIndexOf(char delim) {
+        return firstIndexOf(delim, 0);
+    }
+
+    public int firstIndexOf(char delim, int startIndex) {
+        int position = startIndex;
+        while (position < length) {
+            if (value[position] == delim) {
+                return position;
+            }
+            position++;
+        }
+        return -1;
+    }
+
+    public String substr(int lastIndex) {
+        return String.copyValueOf(value, lastIndex, length - lastIndex);
+    }
+
+    public void prependChar(char c) {
+        if (value.length == length) {
+            copyAndExpand(value.length * 2);
+        }
+        System.arraycopy(value, 0, value, 1, length);
+        value[0] = c;
+        length += 1;
+    }
+
+    public void insert(int i, String aString) {
+        if (value.length - length < aString.length()) {
+            copyAndExpand(value.length + aString.length());
+        }
+        System.arraycopy(value, i, value, i + aString.length(), aString.length());
+        aString.getChars(0, aString.length(), value, i);
+        length += aString.length();
+    }
+
+    public char[] getValue() {
+        return value;
+    }
+
+    public int getLength() {
+        return length;
+    }
+
+    public int getIncrement() {
+        return increment;
+    }
+
+    public void setValue(char[] value) {
+        this.value = value;
+    }
+
+    public void setIncrement(int increment) {
+        this.increment = increment;
+    }
+}
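
A short usage sketch of the mutable character buffer above (values are illustrative):

    AMutableCharArrayString s = new AMutableCharArrayString("hello");
    s.appendString(" world");
    s.appendChar('!');                    // "hello world!"
    int space = s.firstIndexOf(' ');      // 5
    String tail = s.substr(space + 1);    // "world!"
    s.prependChar('>');                   // ">hello world!"
    s.setValue("reset");                  // reuses the internal buffer
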

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableNumberFactor.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableNumberFactor.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableNumberFactor.java
new file mode 100644
index 0000000..5c2a2f9
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AMutableNumberFactor.java
@@ -0,0 +1,37 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.asterix.external.classad.Value.NumberFactor;
+
+public class AMutableNumberFactor {
+    private NumberFactor factor;
+
+    public AMutableNumberFactor() {
+        factor = NumberFactor.NO_FACTOR;
+    }
+
+    public NumberFactor getFactor() {
+        return factor;
+    }
+
+    public void setFactor(NumberFactor factor) {
+        this.factor = factor;
+    }
+}

http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AttributeReference.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AttributeReference.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AttributeReference.java
new file mode 100644
index 0000000..04fe6ec
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/AttributeReference.java
@@ -0,0 +1,474 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+public class AttributeReference extends ExprTree {
+
+    private ExprTree expr;
+    private boolean absolute;
+    private AMutableCharArrayString attributeStr;
+    private ClassAd current = new ClassAd(false, false);
+    private ExprList adList = new ExprList();
+    private Value val = new Value();
+    private MutableBoolean rVal = new MutableBoolean(false);
+    private AttributeReference tempAttrRef;
+    private EvalState tstate = new EvalState();
+
+    public ExprTree getExpr() {
+        return expr;
+    }
+
+    public void setExpr(ExprTree expr) {
+        this.expr = expr == null ? null : expr.self();
+    }
+
+    public AttributeReference() {
+        expr = null;
+        attributeStr = null;
+        absolute = false;
+    }
+
+    /// Copy Constructor
+    public AttributeReference(AttributeReference ref) throws HyracksDataException {
+        copyFrom(ref);
+    }
+
+    /// Assignment operator
+    @Override
+    public boolean equals(Object o) {
+        if (o instanceof AttributeReference) {
+            AttributeReference ref = (AttributeReference) o;
+            return sameAs(ref);
+        }
+        return false;
+    }
+
+    /// node type
+    @Override
+    public NodeKind getKind() {
+        return NodeKind.ATTRREF_NODE;
+    }
+
+    public static AttributeReference createAttributeReference(ExprTree expr, AMutableCharArrayString attrName) {
+        return createAttributeReference(expr, attrName, false);
+    }
+
+    /**
+     * Return a copy of this attribute reference.
+     *
+     * @throws HyracksDataException
+     */
+    @Override
+    public ExprTree copy() throws HyracksDataException {
+        AttributeReference newTree = new AttributeReference();
+        newTree.copyFrom(this);
+        return newTree;
+    }
+
+    /**
+     * Copy from the given reference into this reference.
+     *
+     * @param ref
+     *            The reference to copy from.
+     * @return true if the copy succeeded, false otherwise.
+     * @throws HyracksDataException
+     */
+    public boolean copyFrom(AttributeReference ref) throws HyracksDataException {
+        if (attributeStr == null) {
+            attributeStr = new AMutableCharArrayString(ref.attributeStr);
+        } else {
+            attributeStr.setValue(ref.attributeStr);
+        }
+        if (ref.expr != null) {
+            expr = ref.expr.copy();
+        }
+        super.copyFrom(ref);
+        this.absolute = ref.absolute;
+        return true;
+    }
+
+    /**
+     * Is this attribute reference the same as another?
+     *
+     * @param tree
+     *            The reference to compare with
+     * @return true if they are the same, false otherwise.
+     */
+    @Override
+    public boolean sameAs(ExprTree tree) {
+        boolean is_same;
+        ExprTree pSelfTree = tree.self();
+        if (this == pSelfTree) {
+            is_same = true;
+        } else if (pSelfTree.getKind() != NodeKind.ATTRREF_NODE) {
+            is_same = false;
+        } else {
+            AttributeReference other_ref = (AttributeReference) pSelfTree;
+            if (absolute != other_ref.absolute || !attributeStr.equals(other_ref.attributeStr)) {
+                is_same = false;
+            } else if ((expr == null && other_ref.expr == null) || (expr.equals(other_ref.expr))
+                    || (expr != null && other_ref.expr != null && ((AttributeReference) expr).sameAs(other_ref.expr))) {
+                // Will this check result in infinite recursion? How do I stop it?
+                is_same = true;
+            } else {
+                is_same = false;
+            }
+        }
+        return is_same;
+    }
+
+    // a private ctor for use in significant expr identification
+    private AttributeReference(ExprTree tree, AMutableCharArrayString attrname, boolean absolut) {
+        attributeStr = attrname;
+        expr = tree == null ? null : tree.self();
+        absolute = absolut;
+    }
+
+    @Override
+    public void privateSetParentScope(ClassAd parent) {
+        if (expr != null) {
+            expr.setParentScope(parent);
+        }
+    }
+
+    public void getComponents(ExprTreeHolder tree, AMutableCharArrayString attr, MutableBoolean abs)
+            throws HyracksDataException {
+        tree.copyFrom(expr);
+        attr.setValue(attributeStr);
+        abs.setValue(absolute);
+    }
+
+    public EvalResult findExpr(EvalState state, ExprTreeHolder tree, ExprTreeHolder sig, boolean wantSig)
+            throws HyracksDataException {
+        // establish starting point for search
+        if (expr == null) {
+            // "attr" and ".attr"
+            current = absolute ? state.getRootAd() : state.getCurAd();
+            if (absolute && (current == null)) { // NAC - circularity so no root
+                return EvalResult.EVAL_FAIL; // NAC
+            } // NAC
+        } else {
+            // "expr.attr"
+            rVal.setValue(wantSig ? expr.publicEvaluate(state, val, sig) : expr.publicEvaluate(state, val));
+            if (!rVal.booleanValue()) {
+                return (EvalResult.EVAL_FAIL);
+            }
+
+            if (val.isUndefinedValue()) {
+                return (EvalResult.EVAL_UNDEF);
+            } else if (val.isErrorValue()) {
+                return (EvalResult.EVAL_ERROR);
+            }
+
+            if (!val.isClassAdValue(current) && !val.isListValue(adList)) {
+                return (EvalResult.EVAL_ERROR);
+            }
+        }
+
+        if (val.isListValue()) {
+            ExprList eList = new ExprList();
+            //
+            // iterate through exprList and apply attribute reference
+            // to each exprTree
+            for (ExprTree currExpr : adList.getExprList()) {
+                if (currExpr == null) {
+                    return (EvalResult.EVAL_FAIL);
+                } else {
+                    if (tempAttrRef == null) {
+                        tempAttrRef = new AttributeReference();
+                    } else {
+                        tempAttrRef.reset();
+                    }
+                    createAttributeReference(currExpr.copy(), attributeStr, false, tempAttrRef);
+                    val.clear();
+                    // Create new EvalState, within this scope, because
+                    // attrRef is only temporary, so we do not want to
+                    // cache the evaluated result in the outer state object.
+                    tstate.reset();
+                    tstate.setScopes(state.getCurAd());
+                    rVal.setValue(wantSig ? tempAttrRef.publicEvaluate(tstate, val, sig)
+                            : tempAttrRef.publicEvaluate(tstate, val));
+                    if (!rVal.booleanValue()) {
+                        return (EvalResult.EVAL_FAIL);
+                    }
+
+                    ClassAd evaledAd = new ClassAd();
+                    ExprList evaledList = new ExprList();
+                    if (val.isClassAdValue(evaledAd)) {
+                        eList.add(evaledAd);
+                        continue;
+                    } else if (val.isListValue(evaledList)) {
+                        eList.add(evaledList.copy());
+                        continue;
+                    } else {
+                        eList.add(Literal.createLiteral(val));
+                    }
+                }
+            }
+            tree.setInnerTree(ExprList.createExprList(eList));
+            ClassAd newRoot = new ClassAd();
+            tree.setParentScope(newRoot);
+            return EvalResult.EVAL_OK;
+        }
+        // lookup with scope; this may modify state as a side effect
+
+        /* ClassAd::alternateScope is intended for transitioning Condor from
+         * old to new ClassAds. It allows unscoped attribute references
+         * in expressions that can't be found in the local scope to be
+         * looked for in an alternate scope. In Condor, the alternate
+         * scope is the Target ad in matchmaking.
+         * Expect alternateScope to be removed from a future release.
+         */
+        if (current == null) {
+            return EvalResult.EVAL_UNDEF;
+        }
+        int rc = current.lookupInScope(attributeStr.toString(), tree, state);
+        if (expr == null && !absolute && rc == EvalResult.EVAL_UNDEF.ordinal() && current.getAlternateScope() != null) {
+            rc = current.getAlternateScope().lookupInScope(attributeStr.toString(), tree, state);
+        }
+        return EvalResult.values()[rc];
+    }
+
+    @Override
+    public boolean publicEvaluate(EvalState state, Value val) throws HyracksDataException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder dummy = new ExprTreeHolder();
+        ClassAd curAd = new ClassAd(state.getCurAd());
+        boolean rval;
+        // find the expression and the evalstate
+        switch (findExpr(state, tree, dummy, false)) {
+            case EVAL_FAIL:
+                return false;
+            case EVAL_ERROR:
+                val.setErrorValue();
+                state.setCurAd(curAd);
+                return true;
+            case EVAL_UNDEF:
+                val.setUndefinedValue();
+                state.setCurAd(curAd);
+                return true;
+            case EVAL_OK: {
+                if (state.getDepthRemaining() <= 0) {
+                    val.setErrorValue();
+                    state.setCurAd(curAd);
+                    return false;
+                }
+                state.decrementDepth();
+                rval = tree.publicEvaluate(state, val);
+                state.incrementDepth();
+                state.getCurAd().setValue(curAd);
+                return rval;
+            }
+            default:
+                throw new HyracksDataException("ClassAd:  Should not reach here");
+        }
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val, ExprTreeHolder sig) throws HyracksDataException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder exprSig = new ExprTreeHolder();
+        ClassAd curAd = new ClassAd(state.getCurAd());
+        MutableBoolean rval = new MutableBoolean(true);
+        switch (findExpr(state, tree, exprSig, true)) {
+            case EVAL_FAIL:
+                rval.setValue(false);
+                break;
+            case EVAL_ERROR:
+                val.setErrorValue();
+                break;
+            case EVAL_UNDEF:
+                val.setUndefinedValue();
+                break;
+            case EVAL_OK: {
+                if (state.getDepthRemaining() <= 0) {
+                    val.setErrorValue();
+                    state.getCurAd().setValue(curAd);
+                    return false;
+                }
+                state.decrementDepth();
+                rval.setValue(tree.publicEvaluate(state, val));
+                state.incrementDepth();
+                break;
+            }
+            default:
+                throw new HyracksDataException("ClassAd:  Should not reach here");
+        }
+        sig.setInnerTree((new AttributeReference(exprSig, attributeStr, absolute)));
+        state.getCurAd().setValue(curAd);
+        return rval.booleanValue();
+    }
+
+    @Override
+    public boolean privateFlatten(EvalState state, Value val, ExprTreeHolder ntree, AMutableInt32 op)
+            throws HyracksDataException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder dummy = new ExprTreeHolder();
+        ClassAd curAd;
+        boolean rval;
+        ntree.setInnerTree(null); // Just to be safe...  wenger 2003-12-11.
+        // find the expression and the evalstate
+        curAd = state.getCurAd();
+        switch (findExpr(state, tree, dummy, false)) {
+            case EVAL_FAIL:
+                return false;
+            case EVAL_ERROR:
+                val.setErrorValue();
+                state.getCurAd().setValue(curAd);
+                return true;
+            case EVAL_UNDEF:
+                if (expr != null && state.isFlattenAndInline()) {
+                    ExprTreeHolder expr_ntree = new ExprTreeHolder();
+                    Value expr_val = new Value();
+                    if (state.getDepthRemaining() <= 0) {
+                        val.setErrorValue();
+                        state.getCurAd().setValue(curAd);
+                        return false;
+                    }
+                    state.decrementDepth();
+                    rval = expr.publicFlatten(state, expr_val, expr_ntree);
+                    state.incrementDepth();
+                    if (rval && expr_ntree.getInnerTree() != null) {
+                        ntree.setInnerTree(createAttributeReference(expr_ntree, attributeStr));
+                        if (ntree.getInnerTree() != null) {
+                            state.getCurAd().setValue(curAd);
+                            return true;
+                        }
+                    }
+                }
+                ntree.setInnerTree(copy());
+                state.getCurAd().setValue(curAd);
+                return true;
+            case EVAL_OK: {
+                // Don't flatten or inline a classad that's referred to
+                // by an attribute.
+                if (tree.getKind() == NodeKind.CLASSAD_NODE) {
+                    ntree.setInnerTree(copy());
+                    val.setUndefinedValue();
+                    return true;
+                }
+
+                if (state.getDepthRemaining() <= 0) {
+                    val.setErrorValue();
+                    state.getCurAd().setValue(curAd);
+                    return false;
+                }
+                state.decrementDepth();
+
+                rval = tree.publicFlatten(state, val, ntree);
+                state.incrementDepth();
+
+                // don't inline if it didn't flatten to a value, and clear cache
+                // do inline if FlattenAndInline was called
+                if (ntree.getInnerTree() != null) {
+                    if (state.isFlattenAndInline()) { // NAC
+                        return true; // NAC
+                    } // NAC
+                    ntree.setInnerTree(copy());
+                    val.setUndefinedValue();
+                }
+
+                state.getCurAd().setValue(curAd);
+                return rval;
+            }
+            default:
+                throw new HyracksDataException("ClassAd:  Should not reach here");
+        }
+    }
+
+    /**
+     * Factory method to create attribute reference nodes.
+     *
+     * @param expr
+     *            The expression part of the reference (i.e., in
+     *            case of expr.attr). This parameter is NULL if the reference
+     *            is absolute (i.e., .attr) or simple (i.e., attr).
+     * @param attrName
+     *            The name of the reference. This string is
+     *            duplicated internally.
+     * @param absolute
+     *            True if the reference is an absolute reference
+     *            (i.e., in case of .attr). This parameter cannot be true if
+     *            expr is not NULL; the default value is false.
+     */
+    public static AttributeReference createAttributeReference(ExprTree tree, AMutableCharArrayString attrStr,
+            boolean absolut) {
+        return (new AttributeReference(tree, attrStr, absolut));
+    }
+
+    public void setValue(ExprTree tree, AMutableCharArrayString attrStr, boolean absolut) {
+        this.absolute = absolut;
+        this.attributeStr = attrStr;
+        this.expr = tree == null ? null : tree.self();
+    }
+
+    public static void createAttributeReference(ExprTree tree, AMutableCharArrayString attrStr, boolean absolut,
+            AttributeReference ref) {
+        ref.setValue(tree, attrStr, absolut);
+    }
+
+    @Override
+    public boolean privateEvaluate(EvalState state, Value val) throws HyracksDataException {
+        ExprTreeHolder tree = new ExprTreeHolder();
+        ExprTreeHolder dummy = new ExprTreeHolder();
+        ClassAd curAd;
+        boolean rval;
+
+        // find the expression and the evalstate
+        curAd = state.getCurAd();
+        switch (findExpr(state, tree, dummy, false)) {
+            case EVAL_FAIL:
+                return false;
+            case EVAL_ERROR:
+                val.setErrorValue();
+                state.getCurAd().setValue(curAd);
+                return true;
+            case EVAL_UNDEF:
+                val.setUndefinedValue();
+                state.getCurAd().setValue(curAd);
+                return true;
+
+            case EVAL_OK: {
+                if (state.getDepthRemaining() <= 0) {
+                    val.setErrorValue();
+                    state.getCurAd().setValue(curAd);
+                    return false;
+                }
+                state.decrementDepth();
+                rval = tree.publicEvaluate(state, val);
+                state.incrementDepth();
+                state.getCurAd().setValue(curAd);
+                return rval;
+            }
+            default:
+                throw new HyracksDataException("ClassAd:  Should not reach here");
+        }
+    }
+
+    @Override
+    public void reset() {
+        if (expr != null) {
+            expr.reset();
+        }
+    }
+}
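
A minimal sketch of building references with the factory above (attribute names are hypothetical; scopeExpr stands for an existing ExprTree used for the expr.attr form):

    AttributeReference simpleRef =
            AttributeReference.createAttributeReference(null, new AMutableCharArrayString("Requirements"), false);
    AttributeReference scopedRef =
            AttributeReference.createAttributeReference(scopeExpr, new AMutableCharArrayString("Memory"), false);
    // Evaluation goes through publicEvaluate(state, val), which resolves the name in the
    // current or root scope depending on whether the reference is absolute.
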


http://git-wip-us.apache.org/repos/asf/incubator-asterixdb/blob/ac683db0/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java
----------------------------------------------------------------------
diff --git a/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java
new file mode 100644
index 0000000..ae3c5ef
--- /dev/null
+++ b/asterix-external-data/src/test/java/org/apache/asterix/external/classad/Operation.java
@@ -0,0 +1,1902 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.asterix.external.classad;
+
+import org.apache.asterix.external.classad.Value.ValueType;
+import org.apache.asterix.om.base.AMutableDouble;
+import org.apache.asterix.om.base.AMutableInt32;
+import org.apache.asterix.om.base.AMutableInt64;
+import org.apache.commons.lang3.mutable.MutableBoolean;
+import org.apache.hyracks.api.exceptions.HyracksDataException;
+
+/**
+ * Represents a node of the expression tree which is an operation applied to
+ * expression operands, like 3 + 2
+ */
+public class Operation extends ExprTree {
+    enum SigValues {
+        SIG_NONE,
+        SIG_CHLD1,
+        SIG_CHLD2,
+        SIG_DUMMY,
+        SIG_CHLD3
+    };
+
+    /// List of supported operators
+    public static final int OpKind_NO_OP = 0;
+    public static final int OpKind_FIRST_OP = 1;
+    // Comparison
+    public static final int OpKind_COMPARISON_START = OpKind_FIRST_OP;
+    /** @name Strict comparison operators */
+    public static final int OpKind_LESS_THAN_OP = OpKind_COMPARISON_START;
+    public static final int OpKind_LESS_OR_EQUAL_OP = OpKind_LESS_THAN_OP + 1;
+    public static final int OpKind_NOT_EQUAL_OP = OpKind_LESS_OR_EQUAL_OP + 1;
+    public static final int OpKind_EQUAL_OP = OpKind_NOT_EQUAL_OP + 1;
+    public static final int OpKind_GREATER_OR_EQUAL_OP = OpKind_EQUAL_OP + 1;
+    public static final int OpKind_GREATER_THAN_OP = OpKind_GREATER_OR_EQUAL_OP + 1;
+    /** @name Non-strict comparison operators */
+    public static final int OpKind_META_EQUAL_OP = OpKind_GREATER_THAN_OP + 1;
+    public static final int OpKind_IS_OP = OpKind_META_EQUAL_OP;
+    public static final int OpKind_META_NOT_EQUAL_OP = OpKind_IS_OP + 1;
+    public static final int OpKind_ISNT_OP = OpKind_META_NOT_EQUAL_OP;
+    public static final int OpKind_COMPARISON_END = OpKind_ISNT_OP;
+    /** @name Arithmetic operators */
+    public static final int OpKind_ARITHMETIC_START = OpKind_COMPARISON_END + 1;
+    public static final int OpKind_UNARY_PLUS_OP = OpKind_ARITHMETIC_START;
+    public static final int OpKind_UNARY_MINUS_OP = OpKind_UNARY_PLUS_OP + 1;
+    public static final int OpKind_ADDITION_OP = OpKind_UNARY_MINUS_OP + 1;
+    public static final int OpKind_SUBTRACTION_OP = OpKind_ADDITION_OP + 1;
+    public static final int OpKind_MULTIPLICATION_OP = OpKind_SUBTRACTION_OP + 1;
+    public static final int OpKind_DIVISION_OP = OpKind_MULTIPLICATION_OP + 1;
+    public static final int OpKind_MODULUS_OP = OpKind_DIVISION_OP + 1;
+    public static final int OpKind_ARITHMETIC_END = OpKind_MODULUS_OP;
+    /** @name Logical operators */
+    public static final int OpKind_LOGIC_START = OpKind_ARITHMETIC_END + 1;
+    public static final int OpKind_LOGICAL_NOT_OP = OpKind_LOGIC_START;
+    public static final int OpKind_LOGICAL_OR_OP = OpKind_LOGICAL_NOT_OP + 1;
+    public static final int OpKind_LOGICAL_AND_OP = OpKind_LOGICAL_OR_OP + 1;
+    public static final int OpKind_LOGIC_END = OpKind_LOGICAL_AND_OP;
+    /** @name Bitwise operators */
+    public static final int OpKind_BITWISE_START = OpKind_LOGIC_END + 1;
+    public static final int OpKind_BITWISE_NOT_OP = OpKind_BITWISE_START;
+    public static final int OpKind_BITWISE_OR_OP = OpKind_BITWISE_NOT_OP + 1;
+    public static final int OpKind_BITWISE_XOR_OP = OpKind_BITWISE_OR_OP + 1;
+    public static final int OpKind_BITWISE_AND_OP = OpKind_BITWISE_XOR_OP + 1;
+    public static final int OpKind_LEFT_SHIFT_OP = OpKind_BITWISE_AND_OP + 1;
+    public static final int OpKind_RIGHT_SHIFT_OP = OpKind_LEFT_SHIFT_OP + 1;
+    public static final int OpKind_URIGHT_SHIFT_OP = OpKind_RIGHT_SHIFT_OP + 1;
+    public static final int OpKind_BITWISE_END = OpKind_URIGHT_SHIFT_OP;
+    /** @name Miscellaneous operators */
+    public static final int OpKind_MISC_START = OpKind_BITWISE_END + 1;
+    public static final int OpKind_PARENTHESES_OP = OpKind_MISC_START;
+    public static final int OpKind_SUBSCRIPT_OP = OpKind_PARENTHESES_OP + 1;
+    public static final int OpKind_TERNARY_OP = OpKind_SUBSCRIPT_OP + 1;
+    public static final int OpKind_MISC_END = OpKind_TERNARY_OP;
+    public static final int OpKind_LAST_OP = OpKind_MISC_END;
+
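+    // The operator applied by this node and its (up to three) operand subtrees: unary operators use
+    // child1 only, binary operators child1 and child2, and the ternary operator all three.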
+    private int opKind;
+    private ExprTree child1;
+    private ExprTree child2;
+    private ExprTree child3;
+
+    /// node type
+    @Override
+    public NodeKind getKind() {
+        return NodeKind.OP_NODE;
+    }
+
+    public int getOpKind() {
+        return opKind;
+    }
+
+    /**
+     * Factory methods to create an operation expression node.
+     *
+     * @param opkind
+     *            The kind of operation.
+     * @param e1
+     *            The first sub-expression child of the node.
+     * @param e2
+     *            The second sub-expression child of the node (if any).
+     * @return The constructed operation
+     */
+    public static Operation createOperation(int opkind, ExprTree e1, ExprTree e2) {
+        return createOperation(opkind, e1, e2, null);
+    }
+
+    public static Operation createOperation(int opkind, ExprTree e1) {
+        return createOperation(opkind, e1, null, null);
+    }
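+
+    // Illustrative use (the literal operands named here are hypothetical; Literal.createLiteral is
+    // used elsewhere in this class): createOperation(OpKind_ADDITION_OP, three, two) builds the
+    // expression tree for 3 + 2 mentioned in the class comment.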
+
+    // Documentation for the public operate() methods and the isStrictOperator() predicate defined further below:
+    /**
+     * Convenience method which operates on binary operators.
+     *
+     * @param op
+     *            The kind of operation.
+     * @param op1
+     *            The first operand.
+     * @param op2
+     *            The second operand.
+     * @param result
+     *            The result of the operation.
+     * @see OpKind, Value
+     */
+
+    /**
+     * Convenience method which operates on ternary operators.
+     *
+     * @param op
+     *            The kind of operation.
+     * @param op1
+     *            The first operand.
+     * @param op2
+     *            The second operand.
+     * @param op3
+     *            The third operand.
+     * @param result
+     *            The result of the operation.
+     * @see OpKind, Value
+     */
+
+    /**
+     * Predicate which tests if an operator is strict.
+     *
+     * @param op
+     *            The operator to be tested.
+     * @return true if the operator is strict, false otherwise.
+     */
+
+    public Operation() {
+        opKind = OpKind_NO_OP;
+        child1 = null;
+        child2 = null;
+        child3 = null;
+    }
+
+    public Operation(Operation op) throws HyracksDataException {
+        copyFrom(op);
+    }
+
+    @Override
+    public ExprTree copy() throws HyracksDataException {
+        Operation newTree = new Operation();
+        newTree.copyFrom(this);
+        return newTree;
+    }
+
+    public boolean copyFrom(Operation op) throws HyracksDataException {
+        boolean success = true;
+        if (op.child1 == null) {
+            child1 = null;
+        } else {
+            if (child1 == null) {
+                child1 = new ExprTreeHolder();
+            }
+            child1.copyFrom(op.child1);
+            child1 = child1.self();
+        }
+        if (op.child2 == null) {
+            child2 = null;
+        } else {
+            if (child2 == null) {
+                child2 = new ExprTreeHolder();
+            }
+            child2.copyFrom(op.child2);
+            child2 = child2.self();
+        }
+        if (op.child3 == null) {
+            child3 = null;
+        } else {
+            if (child3 == null) {
+                child3 = new ExprTreeHolder();
+            }
+            child3.copyFrom(op.child3);
+            child3 = child3.self();
+        }
+        this.opKind = op.opKind;
+        super.copyFrom(op);
+        return success;
+    }
+
+    @Override
+    public boolean sameAs(ExprTree tree) {
+        boolean is_same = false;
+        Operation other_op;
+        ExprTree pSelfTree = tree.self();
+
+        if (pSelfTree.getKind() != NodeKind.OP_NODE) {
+            is_same = false;
+        } else {
+            other_op = (Operation) pSelfTree;
+            if (opKind == other_op.opKind && sameChild(child1, other_op.child1) && sameChild(child2, other_op.child2)
+                    && sameChild(child3, other_op.child3)) {
+                is_same = true;
+            } else {
+                is_same = false;
+            }
+        }
+        return is_same;
+    }
+
+    public boolean sameChild(ExprTree tree1, ExprTree tree2) {
+        boolean is_same = false;
+        if (tree1 == null) {
+            if (tree2 == null) {
+                is_same = true;
+            } else {
+                is_same = false;
+            }
+        } else if (tree2 == null) {
+            is_same = false;
+        } else {
+            is_same = tree1.sameAs(tree2);
+        }
+        return is_same;
+    }
+
+    @Override
+    public void privateSetParentScope(ClassAd parent) {
+        if (child1 != null) {
+            child1.setParentScope(parent);
+        }
+        if (child2 != null) {
+            child2.setParentScope(parent);
+        }
+        if (child3 != null) {
+            child3.setParentScope(parent);
+        }
+    }
+
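+    // Applies a binary operator to two values; the unused third operand slot is filled with a dummy.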
+    public static void operate(int opKind, Value op1, Value op2, Value result) throws HyracksDataException {
+        Value dummy = new Value();
+        privateDoOperation(opKind, op1, op2, dummy, true, true, false, result, null);
+    }
+
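+    // Applies a ternary operator to three values.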
+    public void operate(int op, Value op1, Value op2, Value op3, Value result) throws HyracksDataException {
+        privateDoOperation(op, op1, op2, op3, true, true, true, result, null);
+    }
+
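+    /**
+     * Core evaluation routine. Returns a bit mask assembled from SigValues ordinals indicating which
+     * of the (up to three) operand values were significant in producing the result.
+     */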
+    public static int privateDoOperation(int op, Value val1, Value val2, Value val3, boolean valid1, boolean valid2,
+            boolean valid3, Value result) throws HyracksDataException {
+        return privateDoOperation(op, val1, val2, val3, valid1, valid2, valid3, result, null);
+    }
+
+    public static int privateDoOperation(int op, Value val1, Value val2, Value val3, boolean valid1, boolean valid2,
+            boolean valid3, Value result, EvalState es) throws HyracksDataException {
+        ValueType vt1;
+        ValueType vt2;
+        ValueType vt3;
+
+        // get the types of the values
+        vt1 = val1.getType();
+        vt2 = val2.getType();
+        vt3 = val3.getType();
+
+        // take care of the easy cases
+        if (op == OpKind_NO_OP || op == OpKind_PARENTHESES_OP) {
+            result.copyFrom(val1);
+            return SigValues.SIG_CHLD1.ordinal();
+        } else if (op == OpKind_UNARY_PLUS_OP) {
+            if (vt1 == ValueType.BOOLEAN_VALUE || vt1 == ValueType.STRING_VALUE || val1.isListValue()
+                    || vt1 == ValueType.CLASSAD_VALUE || vt1 == ValueType.ABSOLUTE_TIME_VALUE) {
+                result.setErrorValue();
+            } else {
+                // applies for ERROR, UNDEFINED and Numbers
+                result.copyFrom(val1);
+            }
+            return SigValues.SIG_CHLD1.ordinal();
+        }
+
+        // test for cases when evaluation is strict
+        if (isStrictOperator(op)) {
+            // check for error values
+            if (vt1 == ValueType.ERROR_VALUE) {
+                result.setErrorValue();
+                return SigValues.SIG_CHLD1.ordinal();
+            }
+            if (valid2 && vt2 == ValueType.ERROR_VALUE) {
+                result.setErrorValue();
+                return SigValues.SIG_CHLD2.ordinal();
+            }
+            if (valid3 && vt3 == ValueType.ERROR_VALUE) {
+                result.setErrorValue();
+                return SigValues.SIG_CHLD3.ordinal();
+            }
+
+            // check for undefined values. we need to check whether the corresponding
+            // tree exists, because those values would be undefined anyway.
+            if (valid1 && vt1 == ValueType.UNDEFINED_VALUE) {
+                result.setUndefinedValue();
+                return SigValues.SIG_CHLD1.ordinal();
+            }
+            if (valid2 && vt2 == ValueType.UNDEFINED_VALUE) {
+                result.setUndefinedValue();
+                return SigValues.SIG_CHLD2.ordinal();
+            }
+            if (valid3 && vt3 == ValueType.UNDEFINED_VALUE) {
+                result.setUndefinedValue();
+                return SigValues.SIG_CHLD3.ordinal();
+            }
+        }
+
+        // comparison operations (binary, one unary)
+        if (op >= OpKind_COMPARISON_START && op <= OpKind_COMPARISON_END) {
+            return (doComparison(op, val1, val2, result));
+        }
+
+        // arithmetic operations (binary)
+        if (op >= OpKind_ARITHMETIC_START && op <= OpKind_ARITHMETIC_END) {
+            return (doArithmetic(op, val1, val2, result));
+        }
+
+        // logical operators (binary, one unary)
+        if (op >= OpKind_LOGIC_START && op <= OpKind_LOGIC_END) {
+            return (doLogical(op, val1, val2, result));
+        }
+
+        // bitwise operators (binary, one unary)
+        if (op >= OpKind_BITWISE_START && op <= OpKind_BITWISE_END) {
+            return (doBitwise(op, val1, val2, result));
+        }
+
+        // misc.
+        if (op == OpKind_TERNARY_OP) {
+            // ternary (if-operator)
+            MutableBoolean b = new MutableBoolean(false);
+
+            // if the selector is UNDEFINED, the result is undefined
+            if (vt1 == ValueType.UNDEFINED_VALUE) {
+                result.setUndefinedValue();
+                return SigValues.SIG_CHLD1.ordinal();
+            }
+
+            if (!val1.isBooleanValueEquiv(b)) {
+                result.setErrorValue();
+                return SigValues.SIG_CHLD1.ordinal();
+            } else if (b.booleanValue()) {
+                result.copyFrom(val2);
+                return (SigValues.SIG_CHLD2.ordinal());
+            } else {
+                result.copyFrom(val3);
+                return (SigValues.SIG_CHLD3.ordinal());
+            }
+        } else if (op == OpKind_SUBSCRIPT_OP) {
+            // subscripting from a list (strict)
+
+            if (vt1 == ValueType.CLASSAD_VALUE && vt2 == ValueType.STRING_VALUE) {
+                ClassAd classad = new ClassAd();
+                AMutableCharArrayString index = new AMutableCharArrayString();
+
+                val1.isClassAdValue(classad);
+                val2.isStringValue(index);
+
+                if (classad.lookup(index.toString()) == null) {
+                    result.setErrorValue();
+                    return SigValues.SIG_CHLD2.ordinal();
+                }
+                if (!classad.evaluateAttr(index.toString(), result)) {
+                    result.setErrorValue();
+                    return SigValues.SIG_CHLD2.ordinal();
+                }
+
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            } else if (val1.isListValue() && vt2 == ValueType.INTEGER_VALUE) {
+                AMutableInt64 index = new AMutableInt64(0);
+                ExprList elist = new ExprList();
+
+                val1.isListValue(elist);
+                val2.isIntegerValue(index);
+
+                // check bounds
+                if (index.getLongValue() < 0 || index.getLongValue() >= elist.getExprList().size()) {
+                    result.setErrorValue();
+                    return SigValues.SIG_CHLD2.ordinal();
+                }
+                // get value
+                elist.getValue(result, elist.get((int) index.getLongValue()), es);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+            // should not reach here
+            throw new HyracksDataException("Should not get here");
+        }
+        return -1;
+    }
+
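+    // Evaluates this operation by evaluating every existing child and then delegating to
+    // privateDoOperation; logical and conditional operators may short-circuit after the first child.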
+    @Override
+    public boolean privateEvaluate(EvalState state, Value result) throws HyracksDataException {
+        Value val1 = new Value();
+        Value val2 = new Value();
+        Value val3 = new Value();
+        boolean valid1, valid2, valid3;
+        int rval = 0;
+
+        valid1 = false;
+        valid2 = false;
+        valid3 = false;
+
+        AMutableInt32 operationKind = new AMutableInt32(OpKind_NO_OP);
+        ExprTreeHolder child1 = new ExprTreeHolder();
+        ExprTreeHolder child2 = new ExprTreeHolder();
+        ExprTreeHolder child3 = new ExprTreeHolder();
+        getComponents(operationKind, child1, child2, child3);
+
+        // Evaluate all valid children
+        if (child1.getInnerTree() != null) {
+            if (!child1.publicEvaluate(state, val1)) {
+                result.setErrorValue();
+                return (false);
+            }
+            valid1 = true;
+
+            if (shortCircuit(state, val1, result)) {
+                return true;
+            }
+        }
+
+        if (child2.getInnerTree() != null) {
+            if (!child2.publicEvaluate(state, val2)) {
+                result.setErrorValue();
+                return (false);
+            }
+            valid2 = true;
+        }
+        if (child3.getInnerTree() != null) {
+            if (!child3.publicEvaluate(state, val3)) {
+                result.setErrorValue();
+                return (false);
+            }
+            valid3 = true;
+        }
+
+        rval = privateDoOperation(opKind, val1, val2, val3, valid1, valid2, valid3, result, state);
+
+        return (rval != SigValues.SIG_NONE.ordinal());
+    }
+
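+    // Short-circuit support: || and && can be decided from the first operand alone when it is
+    // boolean-equivalent, and ?: only evaluates the branch selected by the condition.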
+    public boolean shortCircuit(EvalState state, Value arg1, Value result) throws HyracksDataException {
+        MutableBoolean arg1_bool = new MutableBoolean();
+        switch (opKind) {
+            case OpKind_LOGICAL_OR_OP:
+                if (arg1.isBooleanValueEquiv(arg1_bool) && arg1_bool.booleanValue()) {
+                    result.setBooleanValue(true);
+                    return true;
+                }
+                break;
+            case OpKind_LOGICAL_AND_OP:
+                if (arg1.isBooleanValueEquiv(arg1_bool) && !arg1_bool.booleanValue()) {
+                    result.setBooleanValue(false);
+                    return true;
+                }
+                break;
+            case OpKind_TERNARY_OP:
+                if (arg1.isBooleanValueEquiv(arg1_bool)) {
+                    if (arg1_bool.booleanValue()) {
+                        if (child2 != null) {
+                            return child2.publicEvaluate(state, result);
+                        }
+                    } else {
+                        if (child3 != null) {
+                            return child3.publicEvaluate(state, result);
+                        }
+                    }
+                }
+                break;
+            default:
+                // no-op
+                break;
+        }
+        return false;
+    }
+
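+    // Evaluation variant that also reports the "significant" subexpression: for strict operators an
+    // exceptional (UNDEFINED/ERROR) operand propagates only that child's tree, while non-strict
+    // operators (IS, ISNT, &&, ||, ?:) keep whichever parts actually determined the result.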
+    @Override
+    public boolean privateEvaluate(EvalState state, Value result, ExprTreeHolder tree) throws HyracksDataException {
+        int sig;
+        Value val1 = new Value();
+        Value val2 = new Value();
+        Value val3 = new Value();
+        ExprTreeHolder t1 = new ExprTreeHolder();
+        ExprTreeHolder t2 = new ExprTreeHolder();
+        ExprTreeHolder t3 = new ExprTreeHolder();
+        boolean valid1 = false, valid2 = false, valid3 = false;
+        AMutableInt32 opKind = new AMutableInt32(OpKind_NO_OP);
+        ExprTreeHolder child1 = new ExprTreeHolder();
+        ExprTreeHolder child2 = new ExprTreeHolder();
+        ExprTreeHolder child3 = new ExprTreeHolder();
+        getComponents(opKind, child1, child2, child3);
+
+        // Evaluate all valid children
+        tree.setInnerTree(null);
+        if (child1.getInnerTree() != null) {
+            if (!child1.publicEvaluate(state, val1, t1)) {
+                result.setErrorValue();
+                return (false);
+            }
+            valid1 = true;
+        }
+
+        if (child2.getInnerTree() != null) {
+            if (!child2.publicEvaluate(state, val2, t2)) {
+                result.setErrorValue();
+                return (false);
+            }
+            valid2 = true;
+        }
+        if (child3.getInnerTree() != null) {
+            if (!child3.publicEvaluate(state, val3, t3)) {
+                result.setErrorValue();
+                return (false);
+            }
+            valid3 = true;
+        }
+
+        // do evaluation
+        sig = privateDoOperation(opKind.getIntegerValue().intValue(), val1, val2, val3, valid1, valid2, valid3, result,
+                state);
+
+        // delete trees which were not significant
+        if (valid1 && 0 != (sig & SigValues.SIG_CHLD1.ordinal())) {
+            t1 = null;
+        }
+        if (valid2 && 0 != (sig & SigValues.SIG_CHLD2.ordinal())) {
+            t2 = null;
+        }
+        if (valid3 && 0 != (sig & SigValues.SIG_CHLD3.ordinal())) {
+            t3 = null;
+        }
+
+        if (sig == SigValues.SIG_NONE.ordinal()) {
+            result.setErrorValue();
+            tree.setInnerTree(null);
+            return (false);
+        }
+
+        // in case of strict operators, if a subexpression is significant and the
+        // corresponding value is UNDEFINED or ERROR, propagate only that tree
+        if (isStrictOperator(opKind.getIntegerValue().intValue())) {
+            // strict unary operators:  unary -, unary +, !, ~, ()
+            if (opKind.getIntegerValue().intValue() == OpKind_UNARY_MINUS_OP
+                    || opKind.getIntegerValue().intValue() == OpKind_UNARY_PLUS_OP
+                    || opKind.getIntegerValue().intValue() == OpKind_LOGICAL_NOT_OP
+                    || opKind.getIntegerValue().intValue() == OpKind_BITWISE_NOT_OP
+                    || opKind.getIntegerValue().intValue() == OpKind_PARENTHESES_OP) {
+                if (val1.isExceptional()) {
+                    // the operator is only propagating the value;  only the
+                    // subexpression is significant
+                    tree.setInnerTree(t1);
+                } else {
+                    // the node operated on the value; the operator is also
+                    // significant
+                    tree.setInnerTree(createOperation(opKind.getIntegerValue().intValue(), t1));
+                }
+                return (true);
+            } else {
+                // strict binary operators
+                if (val1.isExceptional() || val2.isExceptional()) {
+                    // exceptional values are only being propagated
+                    if (0 != (SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD1.ordinal())) {
+                        tree.setInnerTree(t1);
+                        return (true);
+                    } else if (0 != (SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD2.ordinal())) {
+                        tree.setInnerTree(t2);
+                        return (true);
+                    }
+                    throw new HyracksDataException("Should not reach here");
+                } else {
+                    // the node is also significant
+                    tree.setInnerTree(createOperation(opKind.getIntegerValue().intValue(), t1, t2));
+                    return (true);
+                }
+            }
+        } else {
+            // non-strict operators
+            if (opKind.getIntegerValue().intValue() == OpKind_IS_OP
+                    || opKind.getIntegerValue().intValue() == OpKind_ISNT_OP) {
+                // the operation is *always* significant for IS and ISNT
+                tree.setInnerTree(createOperation(opKind.getIntegerValue().intValue(), t1, t2));
+                return (true);
+            }
+            // other non-strict binary operators
+            if (opKind.getIntegerValue().intValue() == OpKind_LOGICAL_AND_OP
+                    || opKind.getIntegerValue().intValue() == OpKind_LOGICAL_OR_OP) {
+                if ((SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD1.ordinal()) != 0
+                        && (SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD2.ordinal()) != 0) {
+                    tree.setInnerTree(createOperation(opKind.getIntegerValue().intValue(), t1, t2));
+                    return (true);
+                } else if ((SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD1.ordinal()) != 0) {
+                    tree.setInnerTree(t1);
+                    return (true);
+                } else if ((SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD2.ordinal()) != 0) {
+                    tree.setInnerTree(t2);
+                    return (true);
+                } else {
+                    throw new HyracksDataException("Shouldn't reach here");
+                }
+            }
+            // non-strict ternary operator (conditional operator) s ? t : f
+            // selector is always significant (???)
+            if (opKind.getIntegerValue().intValue() == OpKind_TERNARY_OP) {
+                Value tmpVal = new Value();
+                tmpVal.setUndefinedValue();
+                tree.setInnerTree(Literal.createLiteral(tmpVal));
+
+                // "true" consequent taken
+                if ((SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD2.ordinal()) != 0) {
+                    tree.setInnerTree(t2);
+                    return (true);
+                } else if ((SigValues.values()[sig].ordinal() & SigValues.SIG_CHLD3.ordinal()) != 0) {
+                    tree.setInnerTree(t3);
+                    return (true);
+                }
+                // neither consequent; selector was exceptional; return ( s )
+                tree.setInnerTree(t1);
+                return (true);
+            }
+        }
+        throw new HyracksDataException("Should not reach here");
+    }
+
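+    // Partial evaluation (flattening): constant subexpressions are folded into values, while binary
+    // operators that are not associative and commutative are never split between a value and a
+    // residual tree.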
+    @Override
+    public boolean privateFlatten(EvalState state, Value val, ExprTreeHolder tree, AMutableInt32 opPtr)
+            throws HyracksDataException {
+        AMutableInt32 childOp1 = new AMutableInt32(OpKind_NO_OP);
+        AMutableInt32 childOp2 = new AMutableInt32(OpKind_NO_OP);
+        ExprTreeHolder fChild1 = new ExprTreeHolder();
+        ExprTreeHolder fChild2 = new ExprTreeHolder();
+        Value val1 = new Value();
+        Value val2 = new Value();
+        Value val3 = new Value();
+        AMutableInt32 newOp = new AMutableInt32(opKind);
+        int op = opKind;
+
+        tree.setInnerTree(null); // Just to be safe...  wenger 2003-12-11.
+
+        // if op is binary, but not associative or commutative, disallow splitting
+        if ((op >= OpKind_COMPARISON_START && op <= OpKind_COMPARISON_END) || op == OpKind_SUBTRACTION_OP
+                || op == OpKind_DIVISION_OP || op == OpKind_MODULUS_OP || op == OpKind_LEFT_SHIFT_OP
+                || op == OpKind_RIGHT_SHIFT_OP || op == OpKind_URIGHT_SHIFT_OP) {
+            if (opPtr != null) {
+                opPtr.setValue(OpKind_NO_OP);
+            }
+            if (child1.publicFlatten(state, val1, fChild1) && child2.publicFlatten(state, val2, fChild2)) {
+                if (fChild1.getInnerTree() == null && fChild2.getInnerTree() == null) {
+                    privateDoOperation(op, val1, val2, val3, true, true, false, val);
+                    tree.setInnerTree(null);
+                    return true;
+                } else if (fChild1.getInnerTree() != null && fChild2.getInnerTree() != null) {
+                    tree.setInnerTree(Operation.createOperation(op, fChild1, fChild2));
+                    return true;
+                } else if (fChild1.getInnerTree() != null) {
+                    tree.setInnerTree(Operation.createOperation(op, fChild1, val2));
+                    return true;
+                } else if (fChild2.getInnerTree() != null) {
+                    tree.setInnerTree(Operation.createOperation(op, val1, fChild2));
+                    return true;
+                }
+            } else {
+                tree.setInnerTree(null);
+                return false;
+            }
+        } else if (op == OpKind_TERNARY_OP || op == OpKind_SUBSCRIPT_OP || op == OpKind_UNARY_PLUS_OP
+                || op == OpKind_UNARY_MINUS_OP || op == OpKind_PARENTHESES_OP || op == OpKind_LOGICAL_NOT_OP
+                || op == OpKind_BITWISE_NOT_OP) {
+            // now catch all non-binary operators
+            return flattenSpecials(state, val, tree);
+        }
+
+        // any op that got past the above is binary, commutative and associative
+        // Flatten sub expressions
+        if ((child1 != null && !child1.publicFlatten(state, val1, fChild1, childOp1))
+                || (child2 != null && !child2.publicFlatten(state, val2, fChild2, childOp2))) {
+            tree.setInnerTree(null);
+            return false;
+        }
+
+        // NOTE: combine() deletes fChild1 and/or fChild2 if they are not
+        // included in tree
+        if (!combine(newOp, val, tree, childOp1, val1, fChild1, childOp2, val2, fChild2)) {
+            tree.setInnerTree(null);
+            if (opPtr != null) {
+                opPtr.setValue(OpKind_NO_OP);
+            }
+            return false;
+        }
+
+        // if splitting is disallowed, fold the value and tree into a tree
+        if (opPtr == null && newOp.getIntegerValue().intValue() != OpKind_NO_OP) {
+            tree.setInnerTree(Operation.createOperation(newOp.getIntegerValue().intValue(), val, tree));
+            if (tree.getInnerTree() == null) {
+                return false;
+            }
+            return true;
+        } else if (opPtr != null) {
+            opPtr.setValue(newOp.getIntegerValue().intValue());
+        }
+        return true;
+    }
+
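+    // Helper for privateFlatten: merges the flattened (operator, value, tree) contributions of the
+    // two children into a single value/tree pair for this node, collapsing values when the shared
+    // operator allows it.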
+    public boolean combine(AMutableInt32 op, Value val, ExprTreeHolder tree, AMutableInt32 op1, Value val1,
+            ExprTreeHolder tree1, AMutableInt32 op2, Value val2, ExprTreeHolder tree2) throws HyracksDataException {
+        Operation newOp = new Operation();
+        Value dummy = new Value(); // undefined
+
+        // special don't care cases for logical operators with exactly one value
+        if ((tree1.getInnerTree() == null || tree2.getInnerTree() == null)
+                && (tree1.getInnerTree() != null || tree2.getInnerTree() != null)
+                && (op.getIntegerValue() == OpKind_LOGICAL_OR_OP || op.getIntegerValue() == OpKind_LOGICAL_AND_OP)) {
+            privateDoOperation(op.getIntegerValue().intValue(), tree1.getInnerTree() == null ? val1 : dummy,
+                    tree2.getInnerTree() == null ? val2 : dummy, dummy, true, true, false, val);
+            if (val.isBooleanValue()) {
+                tree.setInnerTree(null);
+                op.setValue(OpKind_NO_OP);
+                return true;
+            }
+        }
+
+        if (tree1.getInnerTree() == null && tree2.getInnerTree() == null) {
+            // both the left and right children are plain values
+            privateDoOperation(op.getIntegerValue().intValue(), val1, val2, dummy, true, true, false, val);
+            tree.setInnerTree(null);
+            op.setValue(OpKind_NO_OP);
+            return true;
+        } else if (tree1.getInnerTree() == null
+                && (tree2.getInnerTree() != null && op2.getIntegerValue().intValue() == OpKind_NO_OP)) {
+            // leftson is a value, rightson is a tree
+            tree.setInnerTree(tree2.getInnerTree());
+            val.copyFrom(val1);
+            return true;
+        } else if (tree2.getInnerTree() == null
+                && (tree1.getInnerTree() != null && op1.getIntegerValue().intValue() == OpKind_NO_OP)) {
+            // rightson is a value, leftson is a tree
+            tree.setInnerTree(tree1.getInnerTree());
+            val.copyFrom(val2);
+            return true;
+        } else if ((tree1.getInnerTree() != null && op1.getIntegerValue().intValue() == OpKind_NO_OP)
+                && (tree2.getInnerTree() != null && op2.getIntegerValue().intValue() == OpKind_NO_OP)) {
+            // both the left and right children are trees only
+            newOp = createOperation(op.getIntegerValue().intValue(), tree1, tree2);
+            if (newOp == null) {
+                return false;
+            }
+            tree.setInnerTree(newOp);
+            op.setValue(OpKind_NO_OP);
+            return true;
+        }
+
+        // cannot collapse values due to dissimilar ops
+        if ((op1.getIntegerValue().intValue() != OpKind_NO_OP || op2.getIntegerValue().intValue() != OpKind_NO_OP)
+                && !op.equals(op1) && !op.equals(op2)) {
+            // at least one of them returned a value and a tree, and parent does
+            // not share the same operation with either child
+            ExprTreeHolder newOp1 = new ExprTreeHolder();
+            ExprTreeHolder newOp2 = new ExprTreeHolder();
+
+            if (op1.getIntegerValue().intValue() != OpKind_NO_OP) {
+                newOp1.setInnerTree(Operation.createOperation(op1.getIntegerValue().intValue(), val1, tree1));
+            } else if (tree1.getInnerTree() != null) {
+                newOp1.setInnerTree(tree1.getInnerTree());
+            } else {
+                newOp1.setInnerTree(Literal.createLiteral(val1));
+            }
+
+            if (op2.getIntegerValue().intValue() != OpKind_NO_OP) {
+                newOp2.setInnerTree(Operation.createOperation(op2.getIntegerValue().intValue(), val2, tree2));
+            } else if (tree2.getInnerTree() != null) {
+                newOp2.setInnerTree(tree2);
+            } else {
+                newOp2.setInnerTree(Literal.createLiteral(val2));
+            }
+
+            if (newOp1.getInnerTree() == null || newOp2.getInnerTree() == null) {
+                tree.setInnerTree(null);
+                op.setValue(OpKind_NO_OP);
+                return false;
+            }
+            newOp = createOperation(op.getIntegerValue().intValue(), newOp1, newOp2);
+            if (newOp == null) {
+                tree.setInnerTree(null);
+                op.setValue(OpKind_NO_OP);
+                return false;
+            }
+            op.setValue(OpKind_NO_OP);
+            tree.setInnerTree(newOp);
+            return true;
+        }
+
+        if (op.equals(op1) && op.equals(op2)) {
+            // same operator on both children. since op != NO_OP, neither op1 nor op2
+            // is NO_OP, so both children make tree and value contributions
+            newOp = createOperation(op.getIntegerValue().intValue(), tree1, tree2);
+            if (newOp == null) {
+                return false;
+            }
+            privateDoOperation(op.getIntegerValue().intValue(), val1, val2, dummy, true, true, false, val);
+            tree.setInnerTree(newOp);
+            return true;
+        } else if (op.equals(op1)) {
+            // leftson makes a tree,value contribution
+            if (tree2.getInnerTree() == null) {
+                // rightson makes a value contribution
+                privateDoOperation(op.getIntegerValue().intValue(), val1, val2, dummy, true, true, false, val);
+                tree.setInnerTree(tree1);
+                return true;
+            } else {
+                // rightson makes a tree contribution
+                Operation local_newOp = createOperation(op.getIntegerValue().intValue(), tree1, tree2);
+                if (local_newOp == null) {
+                    tree.setInnerTree(null);
+                    op.setValue(OpKind_NO_OP);
+                    return false;
+                }
+                val.copyFrom(val1);
+                tree.setInnerTree(local_newOp); // NAC - BUG FIX
+                return true;
+            }
+        } else if (op.equals(op2)) {
+            // rightson makes a tree,value contribution
+            if (tree1.getInnerTree() == null) {
+                // leftson makes a value contribution
+                privateDoOperation(op.getIntegerValue().intValue(), val1, val2, dummy, true, true, false, val);
+                tree.setInnerTree(tree2);
+                return true;
+            } else {
+                // leftson makes a tree contribution
+                Operation local_newOp = createOperation(op.getIntegerValue().intValue(), tree1, tree2);
+                if (local_newOp == null) {
+                    tree.setInnerTree(null);
+                    op.setValue(OpKind_NO_OP);
+                    return false;
+                }
+                tree.setInnerTree(local_newOp); // NAC BUG FIX
+                val.copyFrom(val2);
+                return true;
+            }
+        }
+
+        throw new HyracksDataException("Should not reach here");
+    }
+
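+    // Comparison dispatch: the meta operators (=?=, =!=) compare without numeric promotion, all
+    // other comparisons coerce numeric operands first and then compare per the coerced type.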
+    public static int doComparison(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        ValueType vt1;
+        ValueType vt2;
+        ValueType coerceResult;
+
+        if (op == OpKind_META_EQUAL_OP || op == OpKind_META_NOT_EQUAL_OP) {
+            // do not do type promotions for the meta operators
+            vt1 = v1.getType();
+            vt2 = v2.getType();
+            coerceResult = vt1;
+        } else {
+            // do numerical type promotions --- other types/values are unchanged
+            coerceResult = coerceToNumber(v1, v2);
+            vt1 = v1.getType();
+            vt2 = v2.getType();
+        }
+
+        // perform comparison for =?= ; true iff same types and same values
+        if (op == OpKind_META_EQUAL_OP) {
+            if (vt1 != vt2) {
+                result.setBooleanValue(false);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            // undefined or error
+            if (vt1 == ValueType.UNDEFINED_VALUE || vt1 == ValueType.ERROR_VALUE) {
+                result.setBooleanValue(true);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+        }
+        // perform comparison for =!= ; negation of =?=
+        if (op == OpKind_META_NOT_EQUAL_OP) {
+            if (vt1 != vt2) {
+                result.setBooleanValue(true);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            // undefined or error
+            if (vt1 == ValueType.UNDEFINED_VALUE || vt1 == ValueType.ERROR_VALUE) {
+                result.setBooleanValue(false);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+        }
+
+        switch (coerceResult) {
+            // at least one of v1, v2 is a string
+            case STRING_VALUE:
+                // check if both are strings
+                if (vt1 != ValueType.STRING_VALUE || vt2 != ValueType.STRING_VALUE) {
+                    // comparison between strings and non-exceptional non-string
+                    // values is error
+                    result.setErrorValue();
+                    return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+                }
+                compareStrings(op, v1, v2, result);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+            case INTEGER_VALUE:
+                compareIntegers(op, v1, v2, result);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+            case REAL_VALUE:
+                compareReals(op, v1, v2, result);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+            case BOOLEAN_VALUE:
+                // check if both are bools
+                if (!v1.isBooleanValue() || !v2.isBooleanValue()) {
+                    result.setErrorValue();
+                    return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+                }
+                compareBools(op, v1, v2, result);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+            case LIST_VALUE:
+            case SLIST_VALUE:
+            case CLASSAD_VALUE:
+                result.setErrorValue();
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+            case ABSOLUTE_TIME_VALUE:
+                if (!v1.isAbsoluteTimeValue() || !v2.isAbsoluteTimeValue()) {
+                    result.setErrorValue();
+                    return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+                }
+                compareAbsoluteTimes(op, v1, v2, result);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+            case RELATIVE_TIME_VALUE:
+                if (!v1.isRelativeTimeValue() || !v2.isRelativeTimeValue()) {
+                    result.setErrorValue();
+                    return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+                }
+                compareRelativeTimes(op, v1, v2, result);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+    }
+
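+    // Arithmetic dispatch: unary minus is handled directly; other operators promote their operands
+    // via coerceToNumber and delegate to integer, real, or time arithmetic.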
+    public static int doArithmetic(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        AMutableInt64 i1 = new AMutableInt64(0);
+        AMutableInt64 i2 = new AMutableInt64(0);
+        ClassAdTime t1 = new ClassAdTime();
+        AMutableDouble r1 = new AMutableDouble(0);
+        MutableBoolean b1 = new MutableBoolean();
+
+        // ensure the operands have arithmetic types
+        if ((!v1.isIntegerValue() && !v1.isRealValue() && !v1.isAbsoluteTimeValue() && !v1.isRelativeTimeValue()
+                && !v1.isBooleanValue())
+                || (op != OpKind_UNARY_MINUS_OP && !v2.isBooleanValue() && !v2.isIntegerValue() && !v2.isRealValue()
+                        && !v2.isAbsoluteTimeValue() && !v2.isRelativeTimeValue())) {
+            result.setErrorValue();
+            return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+        }
+
+        // take care of the unary arithmetic operators
+        if (op == OpKind_UNARY_MINUS_OP) {
+            if (v1.isIntegerValue(i1)) {
+                result.setIntegerValue((-1L) * i1.getLongValue());
+                return SigValues.SIG_CHLD1.ordinal();
+            } else if (v1.isRealValue(r1)) {
+                result.setRealValue((-1) * r1.getDoubleValue());
+                return SigValues.SIG_CHLD1.ordinal();
+            } else if (v1.isRelativeTimeValue(t1)) {
+                t1.setValue((-1) * t1.getTimeInMillis());
+                result.setRelativeTimeValue(t1);
+                return (SigValues.SIG_CHLD1.ordinal());
+            } else if (v1.isBooleanValue(b1)) {
+                result.setBooleanValue(!b1.booleanValue());
+            } else if (v1.isExceptional()) {
+                // undefined or error --- same as operand
+                result.copyFrom(v1);
+                return SigValues.SIG_CHLD1.ordinal();
+            }
+            // unary minus not defined on any other operand type
+            result.setErrorValue();
+            return (SigValues.SIG_CHLD1.ordinal());
+        }
+
+        // perform type promotions and proceed with arithmetic
+        switch (coerceToNumber(v1, v2)) {
+            case INTEGER_VALUE:
+                v1.isIntegerValue(i1);
+                v2.isIntegerValue(i2);
+                switch (op) {
+                    case OpKind_ADDITION_OP:
+                        result.setIntegerValue(i1.getLongValue() + i2.getLongValue());
+                        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+                    case OpKind_SUBTRACTION_OP:
+                        result.setIntegerValue(i1.getLongValue() - i2.getLongValue());
+                        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+                    case OpKind_MULTIPLICATION_OP:
+                        result.setIntegerValue(i1.getLongValue() * i2.getLongValue());
+                        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+                    case OpKind_DIVISION_OP:
+                        if (i2.getLongValue() != 0L) {
+                            result.setIntegerValue(i1.getLongValue() / i2.getLongValue());
+                        } else {
+                            result.setErrorValue();
+                        }
+                        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+                    case OpKind_MODULUS_OP:
+                        if (i2.getLongValue() != 0) {
+                            result.setIntegerValue(i1.getLongValue() % i2.getLongValue());
+                        } else {
+                            result.setErrorValue();
+                        }
+                        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+
+                    default:
+                        // should not reach here
+                        throw new HyracksDataException("Should not get here");
+                }
+
+            case REAL_VALUE: {
+                return (doRealArithmetic(op, v1, v2, result));
+            }
+            case ABSOLUTE_TIME_VALUE:
+            case RELATIVE_TIME_VALUE: {
+                return (doTimeArithmetic(op, v1, v2, result));
+            }
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+    }
+
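+    // Three-valued logic over TRUE, FALSE, UNDEFINED and ERROR; for example UNDEFINED || TRUE is
+    // TRUE and UNDEFINED && FALSE is FALSE.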
+    public static int doLogical(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        MutableBoolean b1 = new MutableBoolean();
+        MutableBoolean b2 = new MutableBoolean();
+
+        // first coerce the inputs to boolean if they are boolean-equivalent
+        if (!v1.isBooleanValue(b1) && v1.isBooleanValueEquiv(b1)) {
+            v1.setBooleanValue(b1.booleanValue());
+        }
+        if (!v2.isBooleanValue(b2) && v2.isBooleanValueEquiv(b2)) {
+            v2.setBooleanValue(b2.booleanValue());
+        }
+
+        ValueType vt1 = v1.getType();
+        ValueType vt2 = v2.getType();
+
+        if (vt1 != ValueType.UNDEFINED_VALUE && vt1 != ValueType.ERROR_VALUE && vt1 != ValueType.BOOLEAN_VALUE) {
+            result.setErrorValue();
+            return SigValues.SIG_CHLD1.ordinal();
+        }
+        if (vt2 != ValueType.UNDEFINED_VALUE && vt2 != ValueType.ERROR_VALUE && vt2 != ValueType.BOOLEAN_VALUE) {
+            result.setErrorValue();
+            return SigValues.SIG_CHLD2.ordinal();
+        }
+
+        // handle unary operator
+        if (op == OpKind_LOGICAL_NOT_OP) {
+            if (vt1 == ValueType.BOOLEAN_VALUE) {
+                result.setBooleanValue(!b1.booleanValue());
+            } else {
+                result.copyFrom(v1);
+            }
+            return SigValues.SIG_CHLD1.ordinal();
+        }
+
+        if (op == OpKind_LOGICAL_OR_OP) {
+            if (vt1 == ValueType.BOOLEAN_VALUE && b1.booleanValue()) {
+                result.setBooleanValue(true);
+                return SigValues.SIG_CHLD1.ordinal();
+            } else if (vt1 == ValueType.ERROR_VALUE) {
+                result.setErrorValue();
+                return SigValues.SIG_CHLD1.ordinal();
+            } else if (vt1 == ValueType.BOOLEAN_VALUE && !b1.booleanValue()) {
+                result.copyFrom(v2);
+            } else if (vt2 != ValueType.BOOLEAN_VALUE) {
+                result.copyFrom(v2);
+            } else if (b2.booleanValue()) {
+                result.setBooleanValue(true);
+            } else {
+                result.setUndefinedValue();
+            }
+            return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+        } else if (op == OpKind_LOGICAL_AND_OP) {
+            if (vt1 == ValueType.BOOLEAN_VALUE && !b1.booleanValue()) {
+                result.setBooleanValue(false);
+                return SigValues.SIG_CHLD1.ordinal();
+            } else if (vt1 == ValueType.ERROR_VALUE) {
+                result.setErrorValue();
+                return SigValues.SIG_CHLD1.ordinal();
+            } else if (vt1 == ValueType.BOOLEAN_VALUE && b1.booleanValue()) {
+                result.copyFrom(v2);
+            } else if (vt2 != ValueType.BOOLEAN_VALUE) {
+                result.copyFrom(v2);
+            } else if (!b2.booleanValue()) {
+                result.setBooleanValue(false);
+            } else {
+                result.setUndefinedValue();
+            }
+            return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+        }
+
+        throw new HyracksDataException("Shouldn't reach here");
+    }
+
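+    // Bitwise operators are defined only on integers; ~ is unary, all others are binary.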
+    public static int doBitwise(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        AMutableInt64 i1 = new AMutableInt64(0);
+        AMutableInt64 i2 = new AMutableInt64(0);
+
+        // bitwise operations are defined only on integers
+        if (op == OpKind_BITWISE_NOT_OP) {
+            if (!v1.isIntegerValue(i1)) {
+                result.setErrorValue();
+                return SigValues.SIG_CHLD1.ordinal();
+            }
+        } else if (!v1.isIntegerValue(i1) || !v2.isIntegerValue(i2)) {
+            result.setErrorValue();
+            return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+        }
+
+        switch (op) {
+            case OpKind_BITWISE_NOT_OP:
+                result.setIntegerValue(~(i1.getLongValue()));
+                break;
+            case OpKind_BITWISE_OR_OP:
+                result.setIntegerValue(i1.getLongValue() | i2.getLongValue());
+                break;
+            case OpKind_BITWISE_AND_OP:
+                result.setIntegerValue(i1.getLongValue() & i2.getLongValue());
+                break;
+            case OpKind_BITWISE_XOR_OP:
+                result.setIntegerValue(i1.getLongValue() ^ i2.getLongValue());
+                break;
+            case OpKind_LEFT_SHIFT_OP:
+                result.setIntegerValue(i1.getLongValue() << i2.getLongValue());
+                break;
+
+            case OpKind_URIGHT_SHIFT_OP:
+                // Java's >>> is an unsigned right shift, so the C++ sign-bit workaround is not needed
+                result.setIntegerValue(i1.getLongValue() >>> i2.getLongValue());
+                break;
+
+            case OpKind_RIGHT_SHIFT_OP:
+                // arithmetic (sign-extending) right shift
+                result.setIntegerValue(i1.getLongValue() >> i2.getLongValue());
+                break;
+
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+
+        if (op == OpKind_BITWISE_NOT_OP) {
+            return SigValues.SIG_CHLD1.ordinal();
+        }
+
+        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+    }
+
+    //out of domain value
+    public static final int EDOM = 33;
+
+    public static int doRealArithmetic(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        AMutableDouble r1 = new AMutableDouble(0);
+        AMutableDouble r2 = new AMutableDouble(0);
+        double comp = 0;
+
+        // The C++ implementation trapped floating point exceptions (SIGFPE / errno) here; in Java,
+        // real division simply yields Infinity or NaN, so only the modulus case below is flagged as
+        // a domain error (EDOM) and turned into the ERROR value.
+
+        v1.isRealValue(r1);
+        v2.isRealValue(r2);
+        int errno = 0;
+        switch (op) {
+            case OpKind_ADDITION_OP:
+                comp = r1.getDoubleValue() + r2.getDoubleValue();
+                break;
+            case OpKind_SUBTRACTION_OP:
+                comp = r1.getDoubleValue() - r2.getDoubleValue();
+                break;
+            case OpKind_MULTIPLICATION_OP:
+                comp = r1.getDoubleValue() * r2.getDoubleValue();
+                break;
+            case OpKind_DIVISION_OP:
+                comp = r1.getDoubleValue() / r2.getDoubleValue();
+                break;
+            case OpKind_MODULUS_OP:
+                errno = EDOM;
+                break;
+            default:
+                // should not reach here
+                throw new HyracksDataException("Should not get here");
+        }
+
+        // check if anything bad happened
+        if (errno == EDOM) {
+            result.setErrorValue();
+        } else {
+            result.setRealValue(comp);
+        }
+        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+    }
+
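+    // Time arithmetic: absolute/relative addition and subtraction, plus scaling of relative times by
+    // numbers; any other combination of time operands is an error.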
+    public static int doTimeArithmetic(int op, Value v1, Value v2, Value result) {
+        ClassAdTime asecs1 = new ClassAdTime();
+        ClassAdTime asecs2 = new ClassAdTime();
+        ValueType vt1 = v1.getType();
+        ValueType vt2 = v2.getType();
+
+        // addition
+        if (op == OpKind_ADDITION_OP) {
+            if (vt1 == ValueType.ABSOLUTE_TIME_VALUE && vt2 == ValueType.RELATIVE_TIME_VALUE) {
+                v1.isAbsoluteTimeValue(asecs1);
+                v2.isRelativeTimeValue(asecs2);
+                asecs1.setValue(asecs1.getTimeInMillis() + asecs2.getTimeInMillis());
+                result.setAbsoluteTimeValue(asecs1);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            if (vt1 == ValueType.RELATIVE_TIME_VALUE && vt2 == ValueType.ABSOLUTE_TIME_VALUE) {
+                v1.isRelativeTimeValue(asecs1);
+                v2.isAbsoluteTimeValue(asecs2);
+                asecs2.setValue(asecs1.getTimeInMillis() + asecs2.getTimeInMillis());
+                result.setAbsoluteTimeValue(asecs2);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            if (vt1 == ValueType.RELATIVE_TIME_VALUE && vt2 == ValueType.RELATIVE_TIME_VALUE) {
+                v1.isRelativeTimeValue(asecs1);
+                v2.isRelativeTimeValue(asecs2);
+                result.setRelativeTimeValue(asecs1.plus(asecs2.getRelativeTime(), false));
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+        }
+
+        if (op == OpKind_SUBTRACTION_OP) {
+            if (vt1 == ValueType.ABSOLUTE_TIME_VALUE && vt2 == ValueType.ABSOLUTE_TIME_VALUE) {
+                v1.isAbsoluteTimeValue(asecs1);
+                v2.isAbsoluteTimeValue(asecs2);
+                result.setRelativeTimeValue(asecs1.subtract(asecs2, false));
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            if (vt1 == ValueType.ABSOLUTE_TIME_VALUE && vt2 == ValueType.RELATIVE_TIME_VALUE) {
+                v1.isAbsoluteTimeValue(asecs1);
+                v2.isRelativeTimeValue(asecs2);
+                asecs1.setValue(asecs1.getTimeInMillis() - asecs2.getRelativeTime());
+                result.setAbsoluteTimeValue(asecs1);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            if (vt1 == ValueType.RELATIVE_TIME_VALUE && vt2 == ValueType.RELATIVE_TIME_VALUE) {
+                v1.isRelativeTimeValue(asecs1);
+                v2.isRelativeTimeValue(asecs2);
+                result.setRelativeTimeValue(asecs1.subtract(asecs2));
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+        }
+
+        if (op == OpKind_MULTIPLICATION_OP || op == OpKind_DIVISION_OP) {
+            if (vt1 == ValueType.RELATIVE_TIME_VALUE && vt2 == ValueType.INTEGER_VALUE) {
+                AMutableInt64 num = new AMutableInt64(0);
+                ClassAdTime msecs = new ClassAdTime();
+                v1.isRelativeTimeValue(asecs1);
+                v2.isIntegerValue(num);
+                if (op == OpKind_MULTIPLICATION_OP) {
+                    msecs.setValue(asecs1.multiply(num.getLongValue(), false));
+                } else {
+                    msecs.setValue(asecs1.divide(num.getLongValue(), false));
+                }
+                result.setRelativeTimeValue(msecs);
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            if (vt1 == ValueType.RELATIVE_TIME_VALUE && vt2 == ValueType.REAL_VALUE) {
+                AMutableDouble num = new AMutableDouble(0);
+                AMutableDouble msecs = new AMutableDouble(0);
+                v1.isRelativeTimeValue(asecs1);
+                v2.isRealValue(num);
+                if (op == OpKind_MULTIPLICATION_OP) {
+                    msecs.setValue(asecs1.getRelativeTime() * num.getDoubleValue());
+                } else {
+                    msecs.setValue(asecs1.getRelativeTime() / num.getDoubleValue());
+                }
+                result.setRelativeTimeValue(new ClassAdTime(1000L * ((long) msecs.getDoubleValue()), false));
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            if (vt1 == ValueType.INTEGER_VALUE && vt2 == ValueType.RELATIVE_TIME_VALUE
+                    && op == OpKind_MULTIPLICATION_OP) {
+                AMutableInt64 num = new AMutableInt64(0);
+                v1.isIntegerValue(num);
+                v2.isRelativeTimeValue(asecs1);
+                result.setRelativeTimeValue(new ClassAdTime(num.getLongValue() * asecs1.getRelativeTime(), false));
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+
+            if (vt2 == ValueType.RELATIVE_TIME_VALUE && vt1 == ValueType.REAL_VALUE && op == OpKind_MULTIPLICATION_OP) {
+                AMutableDouble num = new AMutableDouble(0);
+                v1.isRelativeTimeValue(asecs1);
+                v2.isRealValue(num);
+                result.setRelativeTimeValue(
+                        new ClassAdTime((long) (asecs1.getRelativeTime() * num.getDoubleValue()), false));
+                return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+            }
+        }
+        // no other operations are supported on times
+        result.setErrorValue();
+        return (SigValues.SIG_CHLD1.ordinal() | SigValues.SIG_CHLD2.ordinal());
+    }
+
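+    // String comparison: the meta operators compare case-sensitively, the ordinary relational
+    // operators compare case-insensitively.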
+    public static void compareStrings(int op, Value v1, Value v2, Value result) {
+        AMutableCharArrayString s1 = new AMutableCharArrayString();
+        AMutableCharArrayString s2 = new AMutableCharArrayString();
+        int cmp;
+        v1.isStringValue(s1);
+        v2.isStringValue(s2);
+        result.setBooleanValue(false);
+        if (op == OpKind_META_EQUAL_OP || op == OpKind_META_NOT_EQUAL_OP) {
+            cmp = s1.compareTo(s2);
+        } else {
+            cmp = s1.compareToIgnoreCase(s2);
+        }
+        if (cmp < 0) {
+            // s1 < s2
+            if (op == OpKind_LESS_THAN_OP || op == OpKind_LESS_OR_EQUAL_OP || op == OpKind_META_NOT_EQUAL_OP
+                    || op == OpKind_NOT_EQUAL_OP) {
+                result.setBooleanValue(true);
+            }
+        } else if (cmp == 0) {
+            // s1 == s2
+            if (op == OpKind_LESS_OR_EQUAL_OP || op == OpKind_META_EQUAL_OP || op == OpKind_EQUAL_OP
+                    || op == OpKind_GREATER_OR_EQUAL_OP) {
+                result.setBooleanValue(true);
+            }
+        } else {
+            // s1 > s2
+            if (op == OpKind_GREATER_THAN_OP || op == OpKind_GREATER_OR_EQUAL_OP || op == OpKind_META_NOT_EQUAL_OP
+                    || op == OpKind_NOT_EQUAL_OP) {
+                result.setBooleanValue(true);
+            }
+        }
+    }
+
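+    // Absolute times compare by instant; the meta operators additionally require equal time-zone
+    // offsets.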
+    public static void compareAbsoluteTimes(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        ClassAdTime asecs1 = new ClassAdTime();
+        ClassAdTime asecs2 = new ClassAdTime();
+        boolean compResult = false;
+        v1.isAbsoluteTimeValue(asecs1);
+        v2.isAbsoluteTimeValue(asecs2);
+        switch (op) {
+            case OpKind_LESS_THAN_OP:
+                compResult = (asecs1.getTime() < asecs2.getTime());
+                break;
+            case OpKind_LESS_OR_EQUAL_OP:
+                compResult = (asecs1.getTime() <= asecs2.getTime());
+                break;
+            case OpKind_EQUAL_OP:
+                compResult = (asecs1.getTime() == asecs2.getTime());
+                break;
+            case OpKind_META_EQUAL_OP:
+                compResult = (asecs1.getTime() == asecs2.getTime()) && (asecs1.getOffset() == asecs2.getOffset());
+                break;
+            case OpKind_NOT_EQUAL_OP:
+                compResult = (asecs1.getTime() != asecs2.getTime());
+                break;
+            case OpKind_META_NOT_EQUAL_OP:
+                compResult = (asecs1.getTime() != asecs2.getTime()) || (asecs1.getOffset() != asecs2.getOffset());
+                break;
+            case OpKind_GREATER_THAN_OP:
+                compResult = (asecs1.getTime() > asecs2.getTime());
+                break;
+            case OpKind_GREATER_OR_EQUAL_OP:
+                compResult = (asecs1.getTime() >= asecs2.getTime());
+                break;
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+        result.setBooleanValue(compResult);
+    }
+
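+    // Compares two relative-time (duration) values and stores the boolean outcome
+    // in result.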
+    public static void compareRelativeTimes(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        ClassAdTime rsecs1 = new ClassAdTime();
+        ClassAdTime rsecs2 = new ClassAdTime();
+        boolean compResult = false;
+
+        v1.isRelativeTimeValue(rsecs1);
+        v2.isRelativeTimeValue(rsecs2);
+
+        switch (op) {
+            case OpKind_LESS_THAN_OP:
+                compResult = (rsecs1.getRelativeTime() < rsecs2.getRelativeTime());
+                break;
+
+            case OpKind_LESS_OR_EQUAL_OP:
+                compResult = (rsecs1.getRelativeTime() <= rsecs2.getRelativeTime());
+                break;
+
+            case OpKind_EQUAL_OP:
+            case OpKind_META_EQUAL_OP:
+                compResult = (rsecs1.getRelativeTime() == rsecs2.getRelativeTime());
+                break;
+
+            case OpKind_NOT_EQUAL_OP:
+            case OpKind_META_NOT_EQUAL_OP:
+                compResult = (rsecs1.getRelativeTime() != rsecs2.getRelativeTime());
+                break;
+
+            case OpKind_GREATER_THAN_OP:
+                compResult = (rsecs1.getRelativeTime() > rsecs2.getRelativeTime());
+                break;
+
+            case OpKind_GREATER_OR_EQUAL_OP:
+                compResult = (rsecs1.getRelativeTime() >= rsecs2.getRelativeTime());
+                break;
+
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+        result.setBooleanValue(compResult);
+    }
+
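+    // Compares two boolean values; for the ordering operators, false sorts before true.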
+    public static void compareBools(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        MutableBoolean b1 = new MutableBoolean();
+        MutableBoolean b2 = new MutableBoolean();
+        boolean compResult = false;
+        v1.isBooleanValue(b1);
+        v2.isBooleanValue(b2);
+
+        switch (op) {
+            case OpKind_LESS_THAN_OP:
+                compResult = (b1.compareTo(b2) < 0);
+                break;
+            case OpKind_LESS_OR_EQUAL_OP:
+                compResult = (b1.compareTo(b2) <= 0);
+                break;
+            case OpKind_EQUAL_OP:
+                compResult = (b1.booleanValue() == b2.booleanValue());
+                break;
+            case OpKind_META_EQUAL_OP:
+                compResult = (b1.booleanValue() == b2.booleanValue());
+                break;
+            case OpKind_NOT_EQUAL_OP:
+                compResult = (b1.booleanValue() != b2.booleanValue());
+                break;
+            case OpKind_META_NOT_EQUAL_OP:
+                compResult = (b1.booleanValue() != b2.booleanValue());
+                break;
+            case OpKind_GREATER_THAN_OP:
+                compResult = (b1.compareTo(b2) > 0);
+                break;
+            case OpKind_GREATER_OR_EQUAL_OP:
+                compResult = (b1.compareTo(b2) >= 0);
+                break;
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+        result.setBooleanValue(compResult);
+    }
+
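+    // Compares two integer values and stores the boolean outcome in result.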
+    public static void compareIntegers(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        AMutableInt64 i1 = new AMutableInt64(0);
+        AMutableInt64 i2 = new AMutableInt64(0);
+        boolean compResult = false;
+        v1.isIntegerValue(i1);
+        v2.isIntegerValue(i2);
+        switch (op) {
+            case OpKind_LESS_THAN_OP:
+                compResult = (i1.getLongValue() < i2.getLongValue());
+                break;
+            case OpKind_LESS_OR_EQUAL_OP:
+                compResult = (i1.getLongValue() <= i2.getLongValue());
+                break;
+            case OpKind_EQUAL_OP:
+                compResult = (i1.getLongValue() == i2.getLongValue());
+                break;
+            case OpKind_META_EQUAL_OP:
+                compResult = (i1.getLongValue() == i2.getLongValue());
+                break;
+            case OpKind_NOT_EQUAL_OP:
+                compResult = (i1.getLongValue() != i2.getLongValue());
+                break;
+            case OpKind_META_NOT_EQUAL_OP:
+                compResult = (i1.getLongValue() != i2.getLongValue());
+                break;
+            case OpKind_GREATER_THAN_OP:
+                compResult = (i1.getLongValue() > i2.getLongValue());
+                break;
+            case OpKind_GREATER_OR_EQUAL_OP:
+                compResult = (i1.getLongValue() >= i2.getLongValue());
+                break;
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+        result.setBooleanValue(compResult);
+    }
+
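+    // Compares two real (double) values and stores the boolean outcome in result.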
+    public static void compareReals(int op, Value v1, Value v2, Value result) throws HyracksDataException {
+        AMutableDouble r1 = new AMutableDouble(0);
+        AMutableDouble r2 = new AMutableDouble(0);
+        boolean compResult = false;
+
+        v1.isRealValue(r1);
+        v2.isRealValue(r2);
+
+        switch (op) {
+            case OpKind_LESS_THAN_OP:
+                compResult = (r1.getDoubleValue() < r2.getDoubleValue());
+                break;
+            case OpKind_LESS_OR_EQUAL_OP:
+                compResult = (r1.getDoubleValue() <= r2.getDoubleValue());
+                break;
+            case OpKind_EQUAL_OP:
+                compResult = (r1.getDoubleValue() == r2.getDoubleValue());
+                break;
+            case OpKind_META_EQUAL_OP:
+                compResult = (r1.getDoubleValue() == r2.getDoubleValue());
+                break;
+            case OpKind_NOT_EQUAL_OP:
+                compResult = (r1.getDoubleValue() != r2.getDoubleValue());
+                break;
+            case OpKind_META_NOT_EQUAL_OP:
+                compResult = (r1.getDoubleValue() != r2.getDoubleValue());
+                break;
+            case OpKind_GREATER_THAN_OP:
+                compResult = (r1.getDoubleValue() > r2.getDoubleValue());
+                break;
+            case OpKind_GREATER_OR_EQUAL_OP:
+                compResult = (r1.getDoubleValue() >= r2.getDoubleValue());
+                break;
+            default:
+                // should not get here
+                throw new HyracksDataException("Should not get here");
+        }
+        result.setBooleanValue(compResult);
+    }
+
+    // This function performs type promotions so that both v1 and v2 are of the
+    // same numerical type (assuming v1 and v2 are neither ERROR nor UNDEFINED):
+    //  + if both v1 and v2 are numbers of the same type, return that type
+    //  + if v1 is an int and v2 is a real, convert v1 to real; return REAL_VALUE
+    //  + if v1 is a real and v2 is an int, convert v2 to real; return REAL_VALUE
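+    //
+    // Illustrative example (using the Value setters already used in this class):
+    //   Value a = new Value(); a.setIntegerValue(2);
+    //   Value b = new Value(); b.setRealValue(1.5);
+    //   coerceToNumber(a, b); // promotes a to the real 2.0 and returns REAL_VALUE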
+    public static ValueType coerceToNumber(Value v1, Value v2) {
+        AMutableInt64 i = new AMutableInt64(0);
+        AMutableDouble r = new AMutableDouble(0);
+        MutableBoolean b = new MutableBoolean();
+
+        // either of v1, v2 not numerical?
+        if (v1.isClassAdValue() || v2.isClassAdValue())
+            return ValueType.CLASSAD_VALUE;
+        if (v1.isListValue() || v2.isListValue())
+            return ValueType.LIST_VALUE;
+        if (v1.isStringValue() || v2.isStringValue())
+            return ValueType.STRING_VALUE;
+        if (v1.isUndefinedValue() || v2.isUndefinedValue())
+            return ValueType.UNDEFINED_VALUE;
+        if (v1.isErrorValue() || v2.isErrorValue())
+            return ValueType.ERROR_VALUE;
+        if (v1.isAbsoluteTimeValue() || v2.isAbsoluteTimeValue())
+            return ValueType.ABSOLUTE_TIME_VALUE;
+        if (v1.isRelativeTimeValue() || v2.isRelativeTimeValue())
+            return ValueType.RELATIVE_TIME_VALUE;
+
+        // promote booleans to integers
+        if (v1.isBooleanValue(b)) {
+            if (b.booleanValue()) {
+                v1.setIntegerValue(1);
+            } else {
+                v1.setIntegerValue(0);
+            }
+        }
+
+        if (v2.isBooleanValue(b)) {
+            if (b.booleanValue()) {
+                v2.setIntegerValue(1);
+            } else {
+                v2.setIntegerValue(0);
+            }
+        }
+
+        // both v1 and v2 of same numerical type
+        if (v1.isIntegerValue(i) && v2.isIntegerValue(i))
+            return ValueType.INTEGER_VALUE;
+        if (v1.isRealValue(r) && v2.isRealValue(r))
+            return ValueType.REAL_VALUE;
+
+        // type promotions required
+        if (v1.isIntegerValue(i) && v2.isRealValue(r))
+            v1.setRealValue(i.getLongValue());
+        else if (v1.isRealValue(r) && v2.isIntegerValue(i))
+            v2.setRealValue(i.getLongValue());
+
+        return ValueType.REAL_VALUE;
+    }
+
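+    // Constructs an operation node from an operator kind and up to three child
+    // expressions; unused children may be null.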
+    public Operation(int op, ExprTreeHolder e1, ExprTreeHolder e2, ExprTreeHolder e3) {
+        this.opKind = op;
+        this.child1 = e1 == null ? null : e1.self();
+        this.child2 = e2 == null ? null : e2.self();
+        this.child3 = e3 == null ? null : e3.self();
+    }
+
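+    // Factory equivalent of the constructor above.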
+    public static Operation createOperation(int op, ExprTree e1, ExprTree e2, ExprTree e3) {
+        Operation opnode = new Operation();
+        opnode.opKind = op;
+        opnode.child1 = e1 == null ? null : e1.self();
+        opnode.child2 = e2 == null ? null : e2.self();
+        opnode.child3 = e3 == null ? null : e3.self();
+        return opnode;
+    }
+
+    public static void createOperation(int op, ExprTree e1, ExprTree e2, ExprTree e3, Operation opnode) {
+        opnode.opKind = op;
+        opnode.child1 = e1 == null ? null : e1.self();
+        opnode.child2 = e2 == null ? null : e2.self();
+        opnode.child3 = e3 == null ? null : e3.self();
+    }
+
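+    // Deconstructs this node into its operator kind and its three child expressions.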
+    public void getComponents(AMutableInt32 op, ExprTreeHolder e1, ExprTreeHolder e2, ExprTreeHolder e3) {
+        op.setValue(opKind);
+        e1.setInnerTree(child1);
+        e2.setInnerTree(child2);
+        e3.setInnerTree(child3);
+    }
+
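+    // Builds an operation whose left operand is the given value (wrapped in a Literal)
+    // and whose right operand is the given expression tree; returns null if the tree
+    // is empty or the literal cannot be created.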
+    public static Operation createOperation(int op, Value val, ExprTreeHolder tree) throws HyracksDataException {
+        if (tree.getInnerTree() == null) {
+            return null;
+        }
+        Literal lit = Literal.createLiteral(val);
+        if (lit == null) {
+            return null;
+        }
+        Operation newOp = createOperation(op, lit, tree);
+        return newOp;
+    }
+
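+    // Builds an operation whose left operand is the given expression tree and whose
+    // right operand is the given value (wrapped in a Literal); returns null if the
+    // tree is empty or the literal cannot be created.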
+    public static Operation createOperation(int op, ExprTreeHolder tree, Value val) throws HyracksDataException {
+        if (tree.getInnerTree() == null) {
+            return null;
+        }
+        Literal lit = Literal.createLiteral(val);
+        if (lit == null) {
+            return null;
+        }
+        Operation newOp = createOperation(op, tree, lit);
+        return newOp;
+    }
+
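+    // Flattens the operators that need special handling: the unary operators, the
+    // ternary (?:) operator, and the subscript operator. Returns false if any child
+    // fails to flatten; otherwise either val holds a constant result (tree is null)
+    // or tree holds the partially flattened expression.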
+    public boolean flattenSpecials(EvalState state, Value val, ExprTreeHolder tree) throws HyracksDataException {
+        ExprTreeHolder fChild1 = new ExprTreeHolder();
+        ExprTreeHolder fChild2 = new ExprTreeHolder();
+        ExprTreeHolder fChild3 = new ExprTreeHolder();
+        Value eval1 = new Value();
+        Value eval2 = new Value();
+        Value eval3 = new Value();
+
+        switch (opKind) {
+            case OpKind_UNARY_PLUS_OP:
+            case OpKind_UNARY_MINUS_OP:
+            case OpKind_PARENTHESES_OP:
+            case OpKind_LOGICAL_NOT_OP:
+            case OpKind_BITWISE_NOT_OP:
+                if (!child1.publicFlatten(state, eval1, fChild1)) {
+                    tree.setInnerTree(null);
+                    return false;
+                }
+                if (fChild1.getInnerTree() != null) {
+                    tree.setInnerTree(Operation.createOperation(opKind, fChild1));
+                    return (tree.getInnerTree() != null);
+                } else {
+                    privateDoOperation(opKind, eval1, null, null, true, false, false, val);
+                    tree.setInnerTree(null);
+                    eval1.clear();
+                    return true;
+                }
+            case OpKind_TERNARY_OP:
+                // Flatten the selector expression
+                if (!child1.publicFlatten(state, eval1, fChild1)) {
+                    tree.setInnerTree(null);
+                    return false;
+                }
+
+                // check if selector expression collapsed to a non-undefined value
+                if (fChild1.getInnerTree() == null && !eval1.isUndefinedValue()) {
+                    MutableBoolean b = new MutableBoolean();
+                    // if the selector is not boolean-equivalent, propagate error
+                    if (!eval1.isBooleanValueEquiv(b)) {
+                        val.setErrorValue();
+                        eval1.clear();
+                        tree.setInnerTree(null);
+                        return true;
+                    }
+
+                    // the selector collapsed to a boolean-equivalent value; flatten the chosen branch
+                    if (b.booleanValue()) {
+                        return child2.publicFlatten(state, val, tree);
+                    } else {
+                        return child3.publicFlatten(state, val, tree);
+                    }
+                } else {
+                    // Flatten arms of the if expression
+                    if (!child2.publicFlatten(state, eval2, fChild2) || !child3.publicFlatten(state, eval3, fChild3)) {
+                        // clean up
+                        tree.setInnerTree(null);
+                        return false;
+                    }
+
+                    // if any arm collapsed into a value, make it a Literal
+                    if (fChild2.getInnerTree() == null)
+                        fChild2.setInnerTree(Literal.createLiteral(eval2));
+                    if (fChild3.getInnerTree() == null)
+                        fChild3.setInnerTree(Literal.createLiteral(eval3));
+                    if (fChild2.getInnerTree() == null || fChild3.getInnerTree() == null) {
+                        tree.setInnerTree(null);
+                        return false;
+                    }
+
+                    // fChild1 may be null if child1 flattened to UNDEFINED
+                    if (fChild1.getInnerTree() == null) {
+                        fChild1.setInnerTree(child1.copy());
+                    }
+
+                    tree.setInnerTree(Operation.createOperation(opKind, fChild1, fChild2, fChild3));
+                    if (tree.getInnerTree() == null) {
+                        return false;
+                    }
+                    return true;
+                }
+            case OpKind_SUBSCRIPT_OP:
+                // Flatten both arguments
+                if (!child1.publicFlatten(state, eval1, fChild1) || !child2.publicFlatten(state, eval2, fChild2)) {
+                    tree.setInnerTree(null);
+                    return false;
+                }
+
+                // if both arguments Flattened to values, Evaluate now
+                if (fChild1.getInnerTree() == null && fChild2.getInnerTree() == null) {
+                    privateDoOperation(opKind, eval1, eval2, null, true, true, false, val);
+                    tree.setInnerTree(null);
+                    return true;
+                }
+
+                // otherwise convert Flattened values into literals
+                if (fChild1.getInnerTree() == null)
+                    fChild1.setInnerTree(Literal.createLiteral(eval1));
+                if (fChild2.getInnerTree() == null)
+                    fChild2.setInnerTree(Literal.createLiteral(eval2));
+                if (fChild1.getInnerTree() == null || fChild2.getInnerTree() == null) {
+                    tree.setInnerTree(null);
+                    return false;
+                }
+
+                tree.setInnerTree(Operation.createOperation(opKind, fChild1, fChild2));
+                if (tree.getInnerTree() == null) {
+                    return false;
+                }
+                return true;
+
+            default:
+                throw new HyracksDataException("Should not get here");
+        }
+    }
+
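+    // A strict operator propagates UNDEFINED/ERROR operands directly; the
+    // meta-comparisons (is/isnt), logical and/or, and the ternary operator handle
+    // such operands themselves and are therefore non-strict.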
+    public static boolean isStrictOperator(int op) {
+        switch (op) {
+            case OpKind_META_EQUAL_OP:
+            case OpKind_META_NOT_EQUAL_OP:
+            case OpKind_LOGICAL_AND_OP:
+            case OpKind_LOGICAL_OR_OP:
+            case OpKind_TERNARY_OP:
+                return false;
+            default:
+                return true;
+        }
+    }
+
+    // get precedence levels for operators (see K&R p. 53)
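+    // For example, OpKind_MULTIPLICATION_OP (level 10) binds tighter than
+    // OpKind_ADDITION_OP (level 9), mirroring C operator precedence.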
+    public static int precedenceLevel(int op) {
+        switch (op) {
+            case OpKind_SUBSCRIPT_OP:
+                return (12);
+
+            case OpKind_LOGICAL_NOT_OP:
+            case OpKind_BITWISE_NOT_OP:
+            case OpKind_UNARY_PLUS_OP:
+            case OpKind_UNARY_MINUS_OP:
+                return (11);
+
+            case OpKind_MULTIPLICATION_OP:
+            case OpKind_DIVISION_OP:
+            case OpKind_MODULUS_OP:
+                return (10);
+
+            case OpKind_ADDITION_OP:
+            case OpKind_SUBTRACTION_OP:
+                return (9);
+
+            case OpKind_LEFT_SHIFT_OP:
+            case OpKind_RIGHT_SHIFT_OP:
+            case OpKind_URIGHT_SHIFT_OP:
+                return (8);
+
+            case OpKind_LESS_THAN_OP:
+            case OpKind_LESS_OR_EQUAL_OP:
+            case OpKind_GREATER_OR_EQUAL_OP:
+            case OpKind_GREATER_THAN_OP:
+                return (7);
+
+            case OpKind_NOT_EQUAL_OP:
+            case OpKind_EQUAL_OP:
+            case OpKind_IS_OP:
+            case OpKind_ISNT_OP:
+                return (6);
+
+            case OpKind_BITWISE_AND_OP:
+                return (5);
+
+            case OpKind_BITWISE_XOR_OP:
+                return (4);
+
+            case OpKind_BITWISE_OR_OP:
+                return (3);
+
+            case OpKind_LOGICAL_AND_OP:
+                return (2);
+
+            case OpKind_LOGICAL_OR_OP:
+                return (1);
+
+            case OpKind_TERNARY_OP:
+                return (0);
+            default:
+                return (-1);
+        }
+    }
+
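+    // Resets this node to a no-op and recursively resets its children.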
+    @Override
+    public void reset() {
+        opKind = OpKind_NO_OP;
+        if (child1 != null)
+            child1.reset();
+        if (child2 != null)
+            child2.reset();
+        if (child3 != null)
+            child3.reset();
+    }
+}